use InterpolatedStringHandler to move values to CustomDimensions Tags instead of keeping them in the error message (#2450)

* use InterpolatedStringHandler to move values to CustomDimensions Tags instead of keeping them in the error message

* log blob save raw response failure

* add StringBuilder to CSharpExtensions

Co-authored-by: stas <statis@microsoft.com>
Author: Stas
Date: 2022-09-27 15:22:29 -07:00
Committed by: GitHub
Parent: b3748e4283
Commit: 476c99a998
80 changed files with 574 additions and 539 deletions
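For orientation, here is a minimal sketch of the pattern this commit adopts: a value interpolated with a "Tag:<Name>" format specifier is captured as a tag (a CustomDimensions entry) instead of being baked into the message text. It assumes the LogStringHandler struct added in this commit is referenced; the Demo/Log names are illustrative only, not part of the change.

using System;
using Microsoft.OneFuzz.Service;

static class Demo {
    // Because the parameter type is an interpolated-string handler, the compiler
    // routes $"..." through LogStringHandler instead of building a plain string.
    static void Log(LogStringHandler message) {
        System.Console.WriteLine(message.ToString());            // "stopping node {MachineId}"
        if (message.Tags is not null) {
            foreach (var kv in message.Tags) {
                System.Console.WriteLine($"  {kv.Key} = {kv.Value}"); // "MachineId = <guid>"
            }
        }
    }

    static void Main() {
        var machineId = Guid.NewGuid();
        // The "Tag:MachineId" format moves the value into Tags rather than the message.
        Log($"stopping node {machineId:Tag:MachineId}");
    }
}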

View File

@@ -30,8 +30,9 @@ public class AgentCanSchedule {
 var canScheduleRequest = request.OkV;
 var node = await _context.NodeOperations.GetByMachineId(canScheduleRequest.MachineId);
 if (node == null) {
-_log.Warning($"Unable to find node {canScheduleRequest.MachineId}");
+_log.Warning($"Unable to find {canScheduleRequest.MachineId:Tag:MachineId}");
 return await _context.RequestHandling.NotOk(
 req,
 new Error(
@@ -52,14 +53,14 @@ public class AgentCanSchedule {
 var workStopped = task == null || task.State.ShuttingDown();
 if (workStopped) {
-_log.Info($"Work stopped {canScheduleRequest.MachineId}");
+_log.Info($"Work stopped for: {canScheduleRequest.MachineId:Tag:MachineId} and {canScheduleRequest.TaskId:Tag:TaskId}");
 allowed = false;
 }
 if (allowed) {
 var scp = await _context.NodeOperations.AcquireScaleInProtection(node);
 if (!scp.IsOk) {
-_log.Warning($"Failed to acquire scale in protection due to {scp.ErrorV}");
+_log.Warning($"Failed to acquire scale in protection for: {node.MachineId:Tag:MachineId} in: {node.PoolName:Tag:PoolName} due to {scp.ErrorV:Tag:Error}");
 }
 allowed = scp.IsOk;
 }

View File

@@ -38,7 +38,7 @@ public class AgentCommands {
 var envelope = new NodeCommandEnvelope(command, messageId);
 return await RequestHandling.Ok(req, new PendingNodeCommand(envelope));
 } else {
-_log.WithTag("Command", "GET").Verbose($"failed to find machine id {nodeCommand.MachineId}");
+_log.WithTag("HttpRequest", "GET").Verbose($"failed to find {nodeCommand.MachineId:Tag:MachineId}");
 return await RequestHandling.Ok(req, new PendingNodeCommand(null));
 }
 }
@@ -54,7 +54,7 @@ public class AgentCommands {
 if (message != null) {
 await _context.NodeMessageOperations.Delete(message).IgnoreResult();
 } else {
-_log.WithTag("Command", "DELETE").Verbose($"failed to find machine id {nodeCommand.MachineId} for message {nodeCommand.MessageId}");
+_log.WithTag("HttpRequest", "DELETE").Verbose($"failed to find {nodeCommand.MachineId:Tag:MachineId} for {nodeCommand.MessageId:Tag:MessageId}");
 }
 return await RequestHandling.Ok(req, new BoolResult(true));

View File

@@ -29,7 +29,7 @@ public class AgentEvents {
 }
 var envelope = request.OkV;
-_log.Info($"node event: machine_id: {envelope.MachineId} event: {EntityConverter.ToJsonString(envelope)}");
+_log.WithTag("HttpRequest", "POST").Info($"node event: {envelope.MachineId:Tag:MachineId} {EntityConverter.ToJsonString(envelope):Tag:Event}");
 var error = envelope.Event switch {
 NodeStateUpdate updateEvent => await OnStateUpdate(envelope.MachineId, updateEvent),
@@ -66,20 +66,20 @@ public class AgentEvents {
 private async Async.Task<Error?> OnStateUpdate(Guid machineId, NodeStateUpdate ev) {
 var node = await _context.NodeOperations.GetByMachineId(machineId);
 if (node is null) {
-_log.Warning($"unable to process state update event. machine_id:{machineId} state event:{ev}");
+_log.Warning($"unable to process state update event. {machineId:Tag:MachineId} {ev:Tag:Event}");
 return null;
 }
 if (ev.State == NodeState.Free) {
 if (node.ReimageRequested || node.DeleteRequested) {
-_log.Info($"stopping free node with reset flags: {machineId}");
+_log.Info($"stopping free node with reset flags: {machineId:Tag:MachineId}");
 // discard result: node not used after this point
 _ = await _context.NodeOperations.Stop(node);
 return null;
 }
 if (await _context.NodeOperations.CouldShrinkScaleset(node)) {
-_log.Info($"stopping free node to resize scaleset: {machineId}");
+_log.Info($"stopping free node to resize scaleset: {machineId:Tag:MachineId}");
 // discard result: node not used after this point
 _ = await _context.NodeOperations.SetHalt(node);
 return null;
@@ -88,7 +88,7 @@ public class AgentEvents {
 if (ev.State == NodeState.Init) {
 if (node.DeleteRequested) {
-_log.Info($"stopping node (init and delete_requested): {machineId}");
+_log.Info($"stopping node (init and delete_requested): {machineId:Tag:MachineId}");
 // discard result: node not used after this point
 _ = await _context.NodeOperations.Stop(node);
 return null;
@@ -103,11 +103,11 @@ public class AgentEvents {
 return null;
 }
-_log.Info($"node state update: {machineId} from {node.State} to {ev.State}");
+_log.Info($"node state update: {machineId:Tag:MachineId} from {node.State:Tag:FromState} to {ev.State:Tag:ToState}");
 node = await _context.NodeOperations.SetState(node, ev.State);
 if (ev.State == NodeState.Free) {
-_log.Info($"node now available for work: {machineId}");
+_log.Info($"node now available for work: {machineId:Tag:MachineId}");
 } else if (ev.State == NodeState.SettingUp) {
 if (ev.Data is NodeSettingUpEventData settingUpData) {
 if (!settingUpData.Tasks.Any()) {
@@ -124,7 +124,7 @@ public class AgentEvents {
 Errors: new string[] { $"unable to find task: {taskId}" });
 }
-_log.Info($"node starting task. machine_id: {machineId} job_id: {task.JobId} task_id: {task.TaskId}");
+_log.Info($"node starting task. {machineId:Tag:MachineId} {task.JobId:Tag:JobId} {task.TaskId:Tag:TaskId}");
 // The task state may be `running` if it has `vm_count` > 1, and
 // another node is concurrently executing the task. If so, leave
@@ -142,7 +142,7 @@ public class AgentEvents {
 State: NodeTaskState.SettingUp);
 var r = await _context.NodeTasksOperations.Replace(nodeTask);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"Failed to replace node task {task.TaskId}");
+_log.WithHttpStatus(r.ErrorV).Error($"Failed to replace node task {task.TaskId:Tag:TaskId}");
 }
 }
 }
@@ -152,7 +152,7 @@ public class AgentEvents {
 if (doneData.Error is not null) {
 var errorText = EntityConverter.ToJsonString(doneData);
 error = new Error(ErrorCode.TASK_FAILED, Errors: new string[] { errorText });
-_log.Error($"node 'done' with error: machine_id:{machineId}, data:{errorText}");
+_log.Error($"node 'done' {machineId:Tag:MachineId} - {errorText:Tag:Error}");
 }
 }
@@ -208,15 +208,15 @@ public class AgentEvents {
 State: NodeTaskState.Running);
 var r = await _context.NodeTasksOperations.Replace(nodeTask);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"failed to replace node task {nodeTask.TaskId}");
+_log.WithHttpStatus(r.ErrorV).Error($"failed to replace node task {nodeTask.TaskId:Tag:TaskId}");
 }
 if (task.State.ShuttingDown()) {
-_log.Info($"ignoring task start from node. machine_id:{machineId} job_id:{task.JobId} task_id:{task.TaskId} (state: {task.State})");
+_log.Info($"ignoring task start from node. {machineId:Tag:MachineId} {task.JobId:Tag:JobId} {task.TaskId:Tag:TaskId} ({task.State:Tag:State})");
 return null;
 }
-_log.Info($"task started on node. machine_id:{machineId} job_id:{task.JobId} task_id:{task.TaskId}");
+_log.Info($"task started on node. {machineId:Tag:MachineId} {task.JobId:Tag:JobId} {task.TaskId:Tag:TaskId}");
 task = await _context.TaskOperations.SetState(task, TaskState.Running);
 var taskEvent = new TaskEvent(
@@ -225,7 +225,7 @@ public class AgentEvents {
 EventData: new WorkerEvent(Running: running));
 r = await _context.TaskEventOperations.Replace(taskEvent);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"failed to replace taskEvent for task with id {taskEvent.TaskId}");
+_log.WithHttpStatus(r.ErrorV).Error($"failed to replace taskEvent {taskEvent.TaskId:Tag:TaskId}");
 }
 return null;
@@ -255,7 +255,7 @@ public class AgentEvents {
 };
 if (done.ExitStatus.Success) {
-_log.Info($"task done. {task.JobId}:{task.TaskId} status:{done.ExitStatus}");
+_log.Info($"task done. {task.JobId:Tag:JobId}:{task.TaskId:Tag:TaskId} {done.ExitStatus:Tag:Status}");
 await _context.TaskOperations.MarkStopping(task);
 // keep node if keep-on-completion is set
@@ -263,7 +263,7 @@ public class AgentEvents {
 node = node with { DebugKeepNode = true };
 var r = await _context.NodeOperations.Replace(node);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"keepNodeOnCompletion: failed to replace node {node.MachineId} when setting debug keep node to true");
+_log.WithHttpStatus(r.ErrorV).Error($"keepNodeOnCompletion: failed to replace node {node.MachineId:Tag:MachineId} when setting debug keep node to true");
 }
 }
 } else {
@@ -283,7 +283,7 @@ public class AgentEvents {
 node = node with { DebugKeepNode = true };
 var r = await _context.NodeOperations.Replace(node);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"keepNodeOnfFailure: failed to replace node {node.MachineId} when setting debug keep node to true");
+_log.WithHttpStatus(r.ErrorV).Error($"keepNodeOnfFailure: failed to replace node {node.MachineId:Tag:MachineId} when setting debug keep node to true");
 }
 }
 }
@@ -291,14 +291,14 @@ public class AgentEvents {
 if (!node.DebugKeepNode) {
 var r = await _context.NodeTasksOperations.Delete(new NodeTasks(machineId, done.TaskId));
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"failed to deleting node task since DebugKeepNode is false");
+_log.WithHttpStatus(r.ErrorV).Error($"failed to deleting node task {done.TaskId:Tag:TaskId} for: {machineId:Tag:MachineId} since DebugKeepNode is false");
 }
 }
 var taskEvent = new TaskEvent(done.TaskId, machineId, new WorkerEvent { Done = done });
 var r1 = await _context.TaskEventOperations.Replace(taskEvent);
 if (!r1.IsOk) {
-_log.WithHttpStatus(r1.ErrorV).Error($"failed to update task event for done task {done.TaskId}");
+_log.WithHttpStatus(r1.ErrorV).Error($"failed to update task event for done task {done.TaskId:Tag:TaskId}");
 }
 return null;
 }

View File

@@ -109,7 +109,7 @@ public class AgentRegistration {
 var version = uri["version"] ?? "1.0.0";
-_log.Info($"registration request: machine_id: {machineId} pool_name: {poolName} scaleset_id: {scalesetId} version: {version}");
+_log.Info($"registration request: {machineId:Tag:MachineId} {poolName:Tag:PoolName} {scalesetId:Tag:ScalesetId} {version:Tag:Version}");
 var poolResult = await _context.PoolOperations.GetByName(poolName);
 if (!poolResult.IsOk) {
 return await _context.RequestHandling.NotOk(
@@ -136,7 +136,7 @@ public class AgentRegistration {
 var r = await _context.NodeOperations.Replace(node);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).WithTag("MachineId", node.MachineId.ToString()).Error("failed to replace node operations for node {MachineId}");
+_log.WithHttpStatus(r.ErrorV).Error($"failed to replace node operations for {node.MachineId:Tag:MachineId}");
 }
 return await RequestHandling.Ok(req, await CreateRegistrationResponse(pool));

View File

@@ -74,7 +74,7 @@ public class ContainersFunction {
 }
 var delete = request.OkV;
-_logger.Info($"container - deleting {delete.Name}");
+_logger.Info($"deleting {delete.Name:Tag:ContainerName}");
 var container = await _context.Containers.FindContainer(delete.Name, StorageType.Corpus);
 var deleted = false;
@@ -92,7 +92,7 @@ public class ContainersFunction {
 }
 var post = request.OkV;
-_logger.Info($"container - creating {post.Name}");
+_logger.Info($"creating {post.Name:Tag:ContainerName}");
 var sas = await _context.Containers.CreateContainer(
 post.Name,
 StorageType.Corpus,

View File

@@ -61,7 +61,7 @@ public class InstanceConfig {
 await _context.ConfigOperations.Save(request.OkV.config, false, false);
 if (updateNsg) {
 await foreach (var nsg in _context.NsgOperations.ListNsgs()) {
-_log.Info($"Checking if nsg: {nsg.Data.Location!} ({nsg.Data.Name}) owned by OneFuzz");
+_log.Info($"Checking if nsg: {nsg.Data.Location!:Tag:Location} ({nsg.Data.Name:Tag:NsgName}) owned by OneFuzz");
 if (nsg.Data.Location! == nsg.Data.Name) {
 var result = await _context.NsgOperations.SetAllowedSources(new Nsg(nsg.Data.Location!, nsg.Data.Location!), request.OkV.config.ProxyNsgConfig!);
 if (!result.IsOk) {

View File

@@ -70,7 +70,7 @@ public class Jobs {
 job = job with { Config = job.Config with { Logs = logContainerUri.ToString() } };
 var r = await _context.JobOperations.Insert(job);
 if (!r.IsOk) {
-_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to insert job {job.JobId}");
+_logTracer.WithTag("HttpRequest", "POST").WithHttpStatus(r.ErrorV).Error($"failed to insert job {job.JobId:Tag:JobId}");
 }
 return await RequestHandling.Ok(req, JobResponse.ForJob(job));
 }
@@ -96,7 +96,7 @@ public class Jobs {
 job = job with { State = JobState.Stopping };
 var r = await _context.JobOperations.Replace(job);
 if (!r.IsOk) {
-_logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to replace job {job.JobId}");
+_logTracer.WithTag("HttpRequest", "DELETE").WithHttpStatus(r.ErrorV).Error($"Failed to replace job {job.JobId:Tag:JobId}");
 }
 }

View File

@@ -142,7 +142,7 @@ public class Node {
 var r = await _context.NodeOperations.Replace(node);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"Failed to replace node {node.MachineId}");
+_log.WithTag("HttpRequest", "POST").WithHttpStatus(r.ErrorV).Error($"Failed to replace node {node.MachineId:Tag:MachineId}");
 }
 return await RequestHandling.Ok(req, true);
 }
@@ -176,7 +176,7 @@ public class Node {
 if (node.DebugKeepNode) {
 var r = await _context.NodeOperations.Replace(node with { DebugKeepNode = false });
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"Failed to replace node {node.MachineId}");
+_log.WithTag("HttpRequest", "DELETE").WithHttpStatus(r.ErrorV).Error($"Failed to replace node {node.MachineId:Tag:MachineId}");
 }
 }

View File

@@ -16,7 +16,7 @@ public class Notifications {
 }
 private async Async.Task<HttpResponseData> Get(HttpRequestData req) {
-_log.Info("Notification search");
+_log.WithTag("HttpRequest", "GET").Info($"Notification search");
 var request = await RequestHandling.ParseUri<NotificationSearch>(req);
 if (!request.IsOk) {
 return await _context.RequestHandling.NotOk(req, request.ErrorV, "notification search");
@@ -32,7 +32,7 @@ public class Notifications {
 private async Async.Task<HttpResponseData> Post(HttpRequestData req) {
-_log.Info("adding notification hook");
+_log.WithTag("HttpRequest", "POST").Info($"adding notification hook");
 var request = await RequestHandling.ParseRequest<NotificationCreate>(req);
 if (!request.IsOk) {
 return await _context.RequestHandling.NotOk(req, request.ErrorV, "notification create");

View File

@@ -114,7 +114,7 @@ public class Proxy {
 var updated = forwardResult.OkV with { ProxyId = proxy.ProxyId };
 var r = await _context.ProxyForwardOperations.Replace(updated);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"failed to update proxy forward with machine id {updated.MachineId} with new proxy id {proxy.ProxyId}");
+_log.WithTag("HttpRequest", "POST").WithHttpStatus(r.ErrorV).Error($"failed to update proxy forward with {updated.MachineId:Tag:MachineId} with new {proxy.ProxyId:Tag:ProxyId}");
 }
 await _context.ProxyOperations.SaveProxyConfig(proxy);
 }

View File

@@ -55,7 +55,7 @@ public class QueueFileChanges {
 var container = parts[0];
 var path = string.Join('/', parts.Skip(1));
-log.Info($"file added container: {container} - path: {path}");
+log.Info($"file added : {container:Tag:Container} - {path:Tag:Path}");
 await _notificationOperations.NewFiles(Container.Parse(container), path, failTaskOnTransientError);
 }
 }

View File

@@ -22,20 +22,17 @@ public class QueueNodeHearbeat {
 var events = _context.Events;
 var hb = JsonSerializer.Deserialize<NodeHeartbeatEntry>(msg, EntityConverter.GetJsonSerializerOptions()).EnsureNotNull($"wrong data {msg}");
 var node = await nodes.GetByMachineId(hb.NodeId);
-var log = _log.WithTag("NodeId", hb.NodeId.ToString());
 if (node == null) {
-log.Warning("invalid node id");
+_log.Warning($"invalid {hb.NodeId:Tag:NodeId}");
 return;
 }
 var newNode = node with { Heartbeat = DateTimeOffset.UtcNow };
 var r = await nodes.Replace(newNode);
 if (!r.IsOk) {
-log.WithHttpStatus(r.ErrorV).Error($"Failed to replace heartbeat");
+_log.WithHttpStatus(r.ErrorV).Error($"Failed to replace heartbeat: {hb.NodeId:Tag:NodeId}");
 }
 // TODO: do we still send event if we fail do update the table ?

View File

@@ -23,17 +23,15 @@ public class QueueProxyHearbeat {
 var proxy = await _proxy.GetByProxyId(newHb.ProxyId);
-var log = _log.WithTag("ProxyId", newHb.ProxyId.ToString());
 if (proxy == null) {
-log.Warning($"invalid proxy id: {newHb.ProxyId}");
+_log.Warning($"invalid proxy id: {newHb.ProxyId:Tag:ProxyId}");
 return;
 }
 var newProxy = proxy with { Heartbeat = newHb };
 var r = await _proxy.Replace(newProxy);
 if (!r.IsOk) {
-log.WithHttpStatus(r.ErrorV).Error($"Failed to replace proxy heartbeat");
+_log.WithHttpStatus(r.ErrorV).Error($"Failed to replace proxy heartbeat {newHb.ProxyId:Tag:ProxyId}");
 }
 }
 }

View File

@@ -24,17 +24,16 @@ public class QueueTaskHearbeat {
 var hb = JsonSerializer.Deserialize<TaskHeartbeatEntry>(msg, EntityConverter.GetJsonSerializerOptions()).EnsureNotNull($"wrong data {msg}");
 var task = await _tasks.GetByTaskId(hb.TaskId);
-var log = _log.WithTag("TaskId", hb.TaskId.ToString());
 if (task == null) {
-log.Warning($"invalid task id");
+_log.Warning($"invalid {hb.TaskId:Tag:TaskId}");
 return;
 }
 var newTask = task with { Heartbeat = DateTimeOffset.UtcNow };
 var r = await _tasks.Replace(newTask);
 if (!r.IsOk) {
-log.WithHttpStatus(r.ErrorV).Error($"failed to replace with new task");
+_log.WithHttpStatus(r.ErrorV).Error($"failed to replace with new task {hb.TaskId:Tag:TaskId}");
 }
 await _events.SendEvent(new EventTaskHeartbeat(newTask.JobId, newTask.TaskId, newTask.Config));
 }

View File

@@ -114,7 +114,7 @@ public class ReproVmss {
 var updatedRepro = vm with { State = VmState.Stopping };
 var r = await _context.ReproOperations.Replace(updatedRepro);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV!).Error($"Failed to replace repro {updatedRepro.VmId}");
+_log.WithHttpStatus(r.ErrorV!).Error($"Failed to replace repro {updatedRepro.VmId:Tag:VmId}");
 }
 var response = req.CreateResponse(HttpStatusCode.OK);

View File

@@ -139,7 +139,7 @@ public class Scaleset {
 var inserted = await _context.ScalesetOperations.Insert(scaleset);
 if (!inserted.IsOk) {
-_log.WithHttpStatus(inserted.ErrorV).Error($"failed to insert new scaleset {scaleset.ScalesetId}");
+_log.WithHttpStatus(inserted.ErrorV).Error($"failed to insert new scaleset {scaleset.ScalesetId:Tag:ScalesetId}");
 return await _context.RequestHandling.NotOk(
 req,
 new Error(
@@ -162,7 +162,7 @@ public class Scaleset {
 var r = await _context.AutoScaleOperations.Insert(autoScale);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"failed to insert autoscale options for sclaeset id {autoScale.ScalesetId}");
+_log.WithHttpStatus(r.ErrorV).Error($"failed to insert autoscale options for sclaeset id {autoScale.ScalesetId:Tag:ScalesetId}");
 }
 }

View File

@@ -20,21 +20,19 @@ public class TimerDaily {
 public async Async.Task Run([TimerTrigger("1.00:00:00")] TimerInfo myTimer) {
 var scalesets = _scalesets.Search();
 await foreach (var scaleset in scalesets) {
-var log = _log.WithTag("ScalesetId", scaleset.ScalesetId.ToString());
-log.Info($"updating scaleset configs");
+_log.Info($"updating scaleset configs {scaleset.ScalesetId:Tag:ScalesetId}");
 // todo: do it in batches
 var r = await _scalesets.Replace(scaleset with { NeedsConfigUpdate = true });
 if (!r.IsOk) {
-log.WithHttpStatus(r.ErrorV).Error($"failed to replace scaleset with NeedsConfigUpdate set to true");
+_log.WithHttpStatus(r.ErrorV).Error($"failed to replace scaleset {scaleset.ScalesetId:Tag:ScalesetId} with NeedsConfigUpdate set to true");
 }
 }
 var expiredWebhookLogs = _webhookMessageLogs.SearchExpired();
 await foreach (var logEntry in expiredWebhookLogs) {
-var log = _log.WithTags(new[] { ("WebhookId", logEntry.WebhookId.ToString()), ("EventId", logEntry.EventId.ToString()) });
-log.Info("stopping expired webhook message log");
+_log.Info($"stopping expired webhook {logEntry.WebhookId:Tag:WebhookId} message log for {logEntry.EventId:Tag:EventId}");
 var r = await _webhookMessageLogs.Delete(logEntry);
 if (!r.IsOk) {
-log.WithHttpStatus(r.ErrorV).Error("failed to delete log entry for an expired webhook message");
+_log.WithHttpStatus(r.ErrorV).Error($"failed to delete log entry for an expired webhook {logEntry.WebhookId:Tag:WebhookId} message for {logEntry.EventId:Tag:EventId}");
 }
 }
 }

View File

@@ -29,11 +29,11 @@ public class TimerProxy {
 // requesting to use the proxy while this function is checking if it's
 // out of date
 if (proxy.Outdated && !(await _context.ProxyOperations.IsUsed(proxy))) {
-_logger.Warning($"scaleset-proxy: outdated and not used: {proxy.Region}");
+_logger.Warning($"scaleset-proxy: outdated and not used: {proxy.Region:Tag:Region}");
 proxy = await proxyOperations.SetState(proxy, VmState.Stopping);
 // If something is "wrong" with a proxy, delete & recreate it
 } else if (!proxyOperations.IsAlive(proxy)) {
-_logger.Error($"scaleset-proxy: alive check failed, stopping: {proxy.Region}");
+_logger.Error($"scaleset-proxy: alive check failed, stopping: {proxy.Region:Tag:Region}");
 proxy = await proxyOperations.SetState(proxy, VmState.Stopping);
 } else {
 await proxyOperations.SaveProxyConfig(proxy);
@@ -41,14 +41,14 @@ public class TimerProxy {
 }
 if (VmStateHelper.NeedsWork.Contains(proxy.State)) {
-_logger.Info($"scaleset-proxy: update state. proxy:{proxy.Region} state:{proxy.State}");
+_logger.Info($"scaleset-proxy: update state. proxy:{proxy.Region:Tag:Region} - {proxy.State:Tag:State}");
 proxy = await proxyOperations.ProcessStateUpdate(proxy);
 }
 if (proxy is not null && proxy.State != VmState.Stopped && proxyOperations.IsOutdated(proxy)) {
 var r = await proxyOperations.Replace(proxy with { Outdated = true });
 if (!r.IsOk) {
-_logger.WithHttpStatus(r.ErrorV).Error($"Failed to replace proxy recordy for proxy {proxy.ProxyId}");
+_logger.WithHttpStatus(r.ErrorV).Error($"Failed to replace proxy record for proxy {proxy.ProxyId:Tag:ProxyId}");
 }
 }
 }
@@ -60,7 +60,7 @@ public class TimerProxy {
 var allOutdated = proxies.Where(x => x.Region == region).All(p => p.Outdated);
 if (allOutdated) {
 var proxy = await proxyOperations.GetOrCreate(region);
-_logger.Info($"Creating new proxy with id {proxy.ProxyId} in region {region}");
+_logger.Info($"Creating new proxy with id {proxy.ProxyId:Tag:ProxyId} in {region:Tag:Region}");
 }
 // this is required in order to support upgrade from non-nsg to
@@ -78,7 +78,7 @@ public class TimerProxy {
 if (vnet != null) {
 var result = await nsgOperations.AssociateSubnet(nsgName, vnet, subnet);
 if (!result.OkV) {
-_logger.Error($"Failed to associate NSG and subnet due to {result.ErrorV} in region {region}");
+_logger.Error($"Failed to associate NSG and subnet due to {result.ErrorV} in {region:Tag:Region}");
 }
 }
 }
@@ -90,7 +90,7 @@ public class TimerProxy {
 if (nsgOperations.OkToDelete(regions, nsg.Data.Location!, nsg.Data.Name)) {
 if (nsg.Data.NetworkInterfaces.Count == 0 && nsg.Data.Subnets.Count == 0) {
 if (!await nsgOperations.StartDeleteNsg(nsg.Data.Name)) {
-_logger.Warning($"failed to start deleting NSG {nsg.Data.Name}");
+_logger.Warning($"failed to start deleting NSG {nsg.Data.Name:Tag:NsgName}");
 }
 }
 }

View File

@@ -18,7 +18,7 @@ public class TimerRepro {
 {
 var expired = _onefuzzContext.ReproOperations.SearchExpired();
 await foreach (var repro in expired) {
-_log.Info($"stopping repro: {repro.VmId}");
+_log.Info($"stopping repro: {repro.VmId:Tag:VmId}");
 _ = expiredVmIds.Add(repro.VmId);
 // ignoring result: value not used later
 _ = await _onefuzzContext.ReproOperations.Stopping(repro);
@@ -31,7 +31,7 @@ public class TimerRepro {
 continue;
 }
-_log.Info($"update repro: {repro.VmId}");
+_log.Info($"update repro: {repro.VmId:Tag:VmId}");
 // ignoring result: value not used later
 _ = await _onefuzzContext.ReproOperations.ProcessStateUpdates(repro);
 }

View File

@@ -59,50 +59,50 @@ public class TimerRetention {
 }
 await foreach (var notification in _notificaitonOps.QueryAsync(timeFilter)) {
-_log.Verbose($"checking expired notification for removal: {notification.NotificationId}");
+_log.Verbose($"checking expired notification for removal: {notification.NotificationId:Tag:NotificationId}");
 var container = notification.Container;
 if (!usedContainers.Contains(container)) {
-_log.Info($"deleting expired notification: {notification.NotificationId}");
+_log.Info($"deleting expired notification: {notification.NotificationId:Tag:NotificationId}");
 var r = await _notificaitonOps.Delete(notification);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"failed to delete notification with id {notification.NotificationId}");
+_log.WithHttpStatus(r.ErrorV).Error($"failed to delete notification {notification.NotificationId:Tag:NotificationId}");
 }
 }
 }
 await foreach (var job in _jobOps.QueryAsync(Query.And(timeFilter, Query.EqualEnum("state", JobState.Enabled)))) {
 if (job.UserInfo is not null && job.UserInfo.Upn is not null) {
-_log.Info($"removing PII from job {job.JobId}");
+_log.Info($"removing PII from job {job.JobId:Tag:JobId}");
 var userInfo = job.UserInfo with { Upn = null };
 var updatedJob = job with { UserInfo = userInfo };
 var r = await _jobOps.Replace(updatedJob);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"Failed to save job {updatedJob.JobId}");
+_log.WithHttpStatus(r.ErrorV).Error($"Failed to save job {updatedJob.JobId:Tag:JobId}");
 }
 }
 }
 await foreach (var task in _taskOps.QueryAsync(Query.And(timeFilter, Query.EqualEnum("state", TaskState.Stopped)))) {
 if (task.UserInfo is not null && task.UserInfo.Upn is not null) {
-_log.Info($"removing PII from task {task.TaskId}");
+_log.Info($"removing PII from task {task.TaskId:Tag:TaskId}");
 var userInfo = task.UserInfo with { Upn = null };
 var updatedTask = task with { UserInfo = userInfo };
 var r = await _taskOps.Replace(updatedTask);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"Failed to save task {updatedTask.TaskId}");
+_log.WithHttpStatus(r.ErrorV).Error($"Failed to save task {updatedTask.TaskId:Tag:TaskId}");
 }
 }
 }
 await foreach (var repro in _reproOps.QueryAsync(timeFilter)) {
 if (repro.UserInfo is not null && repro.UserInfo.Upn is not null) {
-_log.Info($"removing PII from repro: {repro.VmId}");
+_log.Info($"removing PII from repro: {repro.VmId:Tag:VmId}");
 var userInfo = repro.UserInfo with { Upn = null };
 var updatedRepro = repro with { UserInfo = userInfo };
 var r = await _reproOps.Replace(updatedRepro);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"Failed to save repro {updatedRepro.VmId}");
+_log.WithHttpStatus(r.ErrorV).Error($"Failed to save repro {updatedRepro.VmId:Tag:VmId}");
 }
 }
 }
@@ -117,7 +117,7 @@ public class TimerRetention {
 var pool = await _poolOps.GetById(queueId);
 if (!pool.IsOk) {
 //pool does not exist. Ok to delete the pool queue
-_log.Info($"Deleting pool queue since pool could not be found in Pool table {q.Name}");
+_log.Info($"Deleting {q.Name:Tag:PoolQueueName} since pool could not be found in Pool table");
 await _queue.DeleteQueue(q.Name, StorageType.Corpus);
 }
 }
@@ -126,13 +126,13 @@ public class TimerRetention {
 var taskQueue = await _taskOps.GetByTaskId(queueId);
 if (taskQueue is null) {
 // task does not exist. Ok to delete the task queue
-_log.Info($"Deleting task queue, since task could not be found in Task table {q.Name}");
+_log.Info($"Deleting {q.Name:Tag:TaskQueueName} since task could not be found in Task table ");
 await _queue.DeleteQueue(q.Name, StorageType.Corpus);
 }
 } else if (q.Name.StartsWith(ShrinkQueue.ShrinkQueueNamePrefix)) {
 //ignore Shrink Queues, since they seem to behave ok
 } else {
-_log.Warning($"Unhandled queue name {q.Name} when doing garbage collection on queues");
+_log.Warning($"Unhandled {q.Name:Tag:QueueName} when doing garbage collection on queues");
 }
 }
 }

View File

@@ -25,7 +25,7 @@ public class TimerTasks {
 public async Async.Task Run([TimerTrigger("00:00:15")] TimerInfo myTimer) {
 var expriredTasks = _taskOperations.SearchExpired();
 await foreach (var task in expriredTasks) {
-_logger.Info($"stopping expired task. job_id:{task.JobId} task_id:{task.TaskId}");
+_logger.Info($"stopping expired task. job_id:{task.JobId:Tag:JobId} task_id:{task.TaskId:Tag:TaskId}");
 await _taskOperations.MarkStopping(task);
 }
@@ -33,20 +33,20 @@ public class TimerTasks {
 var expiredJobs = _jobOperations.SearchExpired();
 await foreach (var job in expiredJobs) {
-_logger.Info($"stopping expired job. job_id:{job.JobId}");
+_logger.Info($"stopping expired job. job_id:{job.JobId:Tag:JobId}");
 _ = await _jobOperations.Stopping(job);
 }
 var jobs = _jobOperations.SearchState(states: JobStateHelper.NeedsWork);
 await foreach (var job in jobs) {
-_logger.Info($"update job: {job.JobId}");
+_logger.Info($"update job: {job.JobId:Tag:JobId}");
 _ = await _jobOperations.ProcessStateUpdates(job);
 }
 var tasks = _taskOperations.SearchStates(states: TaskStateHelper.NeedsWorkStates);
 await foreach (var task in tasks) {
-_logger.Info($"update task: {task.TaskId}");
+_logger.Info($"update task: {task.TaskId:Tag:TaskId}");
 _ = await _taskOperations.ProcessStateUpdate(task);
 }

View File

@@ -16,18 +16,18 @@ public class TimerWorkers {
 }
 private async Async.Task ProcessScalesets(Service.Scaleset scaleset) {
-_log.Verbose($"checking scaleset for updates: {scaleset.ScalesetId}");
+_log.Verbose($"checking scaleset for updates: {scaleset.ScalesetId:Tag:ScalesetId}");
 scaleset = await _scaleSetOps.UpdateConfigs(scaleset);
 var r = await _scaleSetOps.SyncAutoscaleSettings(scaleset);
 if (!r.IsOk) {
-_log.Error($"failed to sync auto scale settings due to {r.ErrorV}");
+_log.Error($"failed to sync auto scale settings {scaleset.ScalesetId:Tag:ScalesetId} due to {r.ErrorV:Tag:Error}");
 }
 // if the scaleset is touched during cleanup, don't continue to process it
 var (touched, ss) = await _scaleSetOps.CleanupNodes(scaleset);
 if (touched) {
-_log.Verbose($"scaleset needed cleanup: {scaleset.ScalesetId}");
+_log.Verbose($"scaleset needed cleanup: {scaleset.ScalesetId:Tag:ScalesetId}");
 return;
 }
@@ -46,7 +46,7 @@ public class TimerWorkers {
 var pools = _poolOps.SearchAll();
 await foreach (var pool in pools) {
 if (PoolStateHelper.NeedsWork.Contains(pool.State)) {
-_log.Info($"update pool: {pool.PoolId} ({pool.Name})");
+_log.Info($"update pool: {pool.PoolId:Tag:PoolId} - {pool.Name:Tag:PoolName}");
 _ = await _poolOps.ProcessStateUpdate(pool);
 }
 }
@@ -62,7 +62,7 @@ public class TimerWorkers {
 var nodes = _nodeOps.SearchStates(states: NodeStateHelper.NeedsWorkStates);
 await foreach (var node in nodes) {
-_log.Info($"update node: {node.MachineId}");
+_log.Info($"update node: {node.MachineId:Tag:MachineId}");
 _ = await _nodeOps.ProcessStateUpdate(node);
 }

View File

@@ -39,7 +39,7 @@ public class WebhookLogs {
 return await _context.RequestHandling.NotOk(req, new Error(ErrorCode.INVALID_REQUEST, new[] { "unable to find webhook" }), "webhook log");
 }
-_log.Info($"getting webhook logs: {request.OkV.WebhookId}");
+_log.Info($"getting webhook logs: {request.OkV.WebhookId:Tag:WebhookId}");
 var logs = _context.WebhookMessageLogOperations.SearchByPartitionKeys(new[] { $"{request.OkV.WebhookId}" });
 var response = req.CreateResponse(HttpStatusCode.OK);

View File

@@ -39,7 +39,7 @@ public class WebhookPing {
 return await _context.RequestHandling.NotOk(req, new Error(ErrorCode.INVALID_REQUEST, new[] { "unable to find webhook" }), "webhook ping");
 }
-_log.Info($"pinging webhook : {request.OkV.WebhookId}");
+_log.Info($"pinging webhook : {request.OkV.WebhookId:Tag:WebhookId}");
 EventPing ping = await _context.WebhookOperations.Ping(webhook);
 var response = req.CreateResponse(HttpStatusCode.OK);

View File

@@ -36,7 +36,7 @@ public class Webhooks {
 }
 if (request.OkV.WebhookId != null) {
-_log.Info($"getting webhook: {request.OkV.WebhookId}");
+_log.Info($"getting webhook: {request.OkV.WebhookId:Tag:WebhookId}");
 var webhook = await _context.WebhookOperations.GetByWebhookId(request.OkV.WebhookId.Value);
 if (webhook == null) {
@@ -48,7 +48,7 @@ public class Webhooks {
 return response;
 }
-_log.Info("listing webhooks");
+_log.Info($"listing webhooks");
 var webhooks = _context.WebhookOperations.SearchAll().Select(w => w with { Url = null, SecretToken = null });
 var response2 = req.CreateResponse(HttpStatusCode.OK);
@@ -68,7 +68,7 @@ public class Webhooks {
 "webhook update");
 }
-_log.Info($"updating webhook: {request.OkV.WebhookId}");
+_log.Info($"updating webhook: {request.OkV.WebhookId:Tag:WebhookId}");
 var webhook = await _context.WebhookOperations.GetByWebhookId(request.OkV.WebhookId);
@@ -86,7 +86,7 @@ public class Webhooks {
 var r = await _context.WebhookOperations.Replace(updated);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"failed to replace webhook with updated entry");
+_log.WithHttpStatus(r.ErrorV).Error($"failed to replace webhook with updated entry {updated.WebhookId:Tag:WebhookId}");
 }
 var response = req.CreateResponse(HttpStatusCode.OK);
@@ -109,10 +109,10 @@ public class Webhooks {
 var r = await _context.WebhookOperations.Insert(webhook);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"failed to insert webhook {webhook.WebhookId}");
+_log.WithHttpStatus(r.ErrorV).Error($"failed to insert webhook {webhook.WebhookId:Tag:WebhookId}");
 }
-_log.Info($"added webhook: {webhook.WebhookId}");
+_log.Info($"added webhook: {webhook.WebhookId:Tag:WebhookId}");
 var response = req.CreateResponse(HttpStatusCode.OK);
 await response.WriteAsJsonAsync(webhook with { Url = null, SecretToken = null });
@@ -129,7 +129,7 @@ public class Webhooks {
 context: "webhook delete");
 }
-_log.Info($"deleting webhook: {request.OkV.WebhookId}");
+_log.Info($"deleting webhook: {request.OkV.WebhookId:Tag:WebhookId}");
 var webhook = await _context.WebhookOperations.GetByWebhookId(request.OkV.WebhookId);
@@ -139,7 +139,7 @@ public class Webhooks {
 var r = await _context.WebhookOperations.Delete(webhook);
 if (!r.IsOk) {
-_log.WithHttpStatus(r.ErrorV).Error($"failed to delete webhook {webhook.Name} - {webhook.WebhookId}");
+_log.WithHttpStatus(r.ErrorV).Error($"failed to delete webhook {webhook.Name:Tag:WebhookName} - {webhook.WebhookId:Tag:WebhookId}");
 }
 var response = req.CreateResponse(HttpStatusCode.OK);

View File

@ -1,14 +1,57 @@
using System.Diagnostics; using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Text;
using Microsoft.ApplicationInsights; using Microsoft.ApplicationInsights;
using Microsoft.ApplicationInsights.DataContracts; using Microsoft.ApplicationInsights.DataContracts;
using Microsoft.ApplicationInsights.Extensibility; using Microsoft.ApplicationInsights.Extensibility;
namespace Microsoft.OneFuzz.Service; namespace Microsoft.OneFuzz.Service;
//See: https://learn.microsoft.com/en-us/dotnet/csharp/whats-new/tutorials/interpolated-string-handler
//by ref struct works, but Moq does not support by-ref struct value so all the tests break... https://github.com/moq/moq4/issues/829
[InterpolatedStringHandler]
public struct LogStringHandler {
private StringBuilder _builder;
private Dictionary<string, string>? _tags;
public LogStringHandler(int literalLength, int formattedCount) {
_builder = new StringBuilder(literalLength);
_tags = null;
}
public void AppendLiteral(string message) {
_builder.Append(message);
}
public void AppendFormatted<T>(T message) {
_builder.Append(message?.ToString());
}
public void AppendFormatted<T>(T message, string? format) {
if (format is not null && format.StartsWith("Tag:")) {
var tag = format["Tag:".Length..];
if (_tags is null) {
_tags = new Dictionary<string, string>();
}
_tags[tag] = $"{message}";
_builder.Append('{').Append(tag).Append('}');
} else if (message is IFormattable msg) {
_builder.Append(msg?.ToString(format, null));
} else {
_builder.Append(message?.ToString()).Append(':').Append(format);
}
}
public override string ToString() => _builder.ToString();
public IReadOnlyDictionary<string, string>? Tags => _tags;
}
public interface ILog { public interface ILog {
void Log(Guid correlationId, String message, SeverityLevel level, IReadOnlyDictionary<string, string> tags, string? caller); void Log(Guid correlationId, LogStringHandler message, SeverityLevel level, IReadOnlyDictionary<string, string> tags, string? caller);
void LogEvent(Guid correlationId, String evt, IReadOnlyDictionary<string, string> tags, IReadOnlyDictionary<string, double>? metrics, string? caller); void LogEvent(Guid correlationId, LogStringHandler evt, IReadOnlyDictionary<string, string> tags, IReadOnlyDictionary<string, double>? metrics, string? caller);
void LogException(Guid correlationId, Exception ex, string message, IReadOnlyDictionary<string, string> tags, IReadOnlyDictionary<string, double>? metrics, string? caller); void LogException(Guid correlationId, Exception ex, LogStringHandler message, IReadOnlyDictionary<string, string> tags, IReadOnlyDictionary<string, double>? metrics, string? caller);
void Flush(); void Flush();
} }
@ -19,31 +62,50 @@ class AppInsights : ILog {
_telemetryClient = new TelemetryClient(config); _telemetryClient = new TelemetryClient(config);
} }
public void Log(Guid correlationId, String message, SeverityLevel level, IReadOnlyDictionary<string, string> tags, string? caller) { public void Log(Guid correlationId, LogStringHandler message, SeverityLevel level, IReadOnlyDictionary<string, string> tags, string? caller) {
Dictionary<string, string> copyTags = new(tags); Dictionary<string, string> copyTags = new(tags);
copyTags["Correlation ID"] = correlationId.ToString(); copyTags["CorrelationId"] = correlationId.ToString();
if (message.Tags is not null) {
foreach (var kv in message.Tags) {
copyTags[kv.Key] = kv.Value;
}
}
if (caller is not null) copyTags["CalledBy"] = caller; if (caller is not null) copyTags["CalledBy"] = caller;
_telemetryClient.TrackTrace(message, level, copyTags); _telemetryClient.TrackTrace(message.ToString(), level, copyTags);
} }
public void LogEvent(Guid correlationId, String evt, IReadOnlyDictionary<string, string> tags, IReadOnlyDictionary<string, double>? metrics, string? caller) { public void LogEvent(Guid correlationId, LogStringHandler evt, IReadOnlyDictionary<string, string> tags, IReadOnlyDictionary<string, double>? metrics, string? caller) {
Dictionary<string, string> copyTags = new(tags); Dictionary<string, string> copyTags = new(tags);
copyTags["Correlation ID"] = correlationId.ToString(); copyTags["CorrelationId"] = correlationId.ToString();
if (caller is not null) copyTags["CalledBy"] = caller; if (caller is not null) copyTags["CalledBy"] = caller;
if (evt.Tags is not null) {
foreach (var kv in evt.Tags) {
copyTags[kv.Key] = kv.Value;
}
}
Dictionary<string, double>? copyMetrics = null; Dictionary<string, double>? copyMetrics = null;
if (metrics is not null) { if (metrics is not null) {
copyMetrics = new(metrics); copyMetrics = new(metrics);
} }
_telemetryClient.TrackEvent(evt, properties: copyTags, metrics: copyMetrics); _telemetryClient.TrackEvent(evt.ToString(), properties: copyTags, metrics: copyMetrics);
} }
public void LogException(Guid correlationId, Exception ex, string message, IReadOnlyDictionary<string, string> tags, IReadOnlyDictionary<string, double>? metrics, string? caller) { public void LogException(Guid correlationId, Exception ex, LogStringHandler message, IReadOnlyDictionary<string, string> tags, IReadOnlyDictionary<string, double>? metrics, string? caller) {
Dictionary<string, string> copyTags = new(tags); Dictionary<string, string> copyTags = new(tags);
copyTags["Correlation ID"] = correlationId.ToString(); copyTags["CorrelationId"] = correlationId.ToString();
if (caller is not null) copyTags["CalledBy"] = caller; if (caller is not null) copyTags["CalledBy"] = caller;
if (message.Tags is not null) {
foreach (var kv in message.Tags) {
copyTags[kv.Key] = kv.Value;
}
}
Dictionary<string, double>? copyMetrics = null; Dictionary<string, double>? copyMetrics = null;
if (metrics is not null) { if (metrics is not null) {
copyMetrics = new(metrics); copyMetrics = new(metrics);
@ -83,17 +145,20 @@ class Console : ILog {
} }
} }
public void Log(Guid correlationId, String message, SeverityLevel level, IReadOnlyDictionary<string, string> tags, string? caller) { public void Log(Guid correlationId, LogStringHandler message, SeverityLevel level, IReadOnlyDictionary<string, string> tags, string? caller) {
System.Console.Out.WriteLine($"[{correlationId}][{level}] {message}"); System.Console.Out.WriteLine($"[{correlationId}][{level}] {message.ToString()}");
LogTags(correlationId, tags); LogTags(correlationId, tags);
if (message.Tags is not null) {
LogTags(correlationId, message.Tags);
}
} }
public void LogEvent(Guid correlationId, String evt, IReadOnlyDictionary<string, string> tags, IReadOnlyDictionary<string, double>? metrics, string? caller) { public void LogEvent(Guid correlationId, LogStringHandler evt, IReadOnlyDictionary<string, string> tags, IReadOnlyDictionary<string, double>? metrics, string? caller) {
System.Console.Out.WriteLine($"[{correlationId}][Event] {evt}"); System.Console.Out.WriteLine($"[{correlationId}][Event] {evt}");
LogTags(correlationId, tags); LogTags(correlationId, tags);
LogMetrics(correlationId, metrics); LogMetrics(correlationId, metrics);
} }
public void LogException(Guid correlationId, Exception ex, string message, IReadOnlyDictionary<string, string> tags, IReadOnlyDictionary<string, double>? metrics, string? caller) { public void LogException(Guid correlationId, Exception ex, LogStringHandler message, IReadOnlyDictionary<string, string> tags, IReadOnlyDictionary<string, double>? metrics, string? caller) {
System.Console.Out.WriteLine($"[{correlationId}][Exception] {message}:{ex}"); System.Console.Out.WriteLine($"[{correlationId}][Exception] {message}:{ex}");
LogTags(correlationId, tags); LogTags(correlationId, tags);
LogMetrics(correlationId, metrics); LogMetrics(correlationId, metrics);
@ -106,16 +171,16 @@ class Console : ILog {
public interface ILogTracer { public interface ILogTracer {
IReadOnlyDictionary<string, string> Tags { get; } IReadOnlyDictionary<string, string> Tags { get; }
void Critical(string message); void Critical(LogStringHandler message);
void Error(string message); void Error(LogStringHandler message);
void Error(Error error); void Error(Error error);
void Event(string evt, IReadOnlyDictionary<string, double>? metrics); void Event(LogStringHandler evt, IReadOnlyDictionary<string, double>? metrics);
void Exception(Exception ex, string message = "", IReadOnlyDictionary<string, double>? metrics = null); void Exception(Exception ex, LogStringHandler message = $"", IReadOnlyDictionary<string, double>? metrics = null);
void ForceFlush(); void ForceFlush();
void Info(string message); void Info(LogStringHandler message);
void Warning(string message); void Warning(LogStringHandler message);
void Verbose(string message); void Verbose(LogStringHandler message);
ILogTracer WithTag(string k, string v); ILogTracer WithTag(string k, string v);
ILogTracer WithTags(IEnumerable<(string, string)>? tags); ILogTracer WithTags(IEnumerable<(string, string)>? tags);
@ -198,7 +263,7 @@ public class LogTracer : ILogTracerInternal {
return new LogTracer(CorrelationId, newTags, _loggers, _logSeverityLevel); return new LogTracer(CorrelationId, newTags, _loggers, _logSeverityLevel);
} }
public void Verbose(string message) { public void Verbose(LogStringHandler message) {
if (_logSeverityLevel <= SeverityLevel.Verbose) { if (_logSeverityLevel <= SeverityLevel.Verbose) {
var caller = GetCaller(); var caller = GetCaller();
foreach (var logger in _loggers) { foreach (var logger in _loggers) {
@ -207,7 +272,7 @@ public class LogTracer : ILogTracerInternal {
} }
} }
public void Info(string message) { public void Info(LogStringHandler message) {
if (_logSeverityLevel <= SeverityLevel.Information) { if (_logSeverityLevel <= SeverityLevel.Information) {
var caller = GetCaller(); var caller = GetCaller();
foreach (var logger in _loggers) { foreach (var logger in _loggers) {
@ -216,7 +281,7 @@ public class LogTracer : ILogTracerInternal {
} }
} }
public void Warning(string message) { public void Warning(LogStringHandler message) {
if (_logSeverityLevel <= SeverityLevel.Warning) { if (_logSeverityLevel <= SeverityLevel.Warning) {
var caller = GetCaller(); var caller = GetCaller();
foreach (var logger in _loggers) { foreach (var logger in _loggers) {
@ -225,7 +290,7 @@ public class LogTracer : ILogTracerInternal {
} }
} }
public void Error(string message) { public void Error(LogStringHandler message) {
if (_logSeverityLevel <= SeverityLevel.Error) { if (_logSeverityLevel <= SeverityLevel.Error) {
var caller = GetCaller(); var caller = GetCaller();
foreach (var logger in _loggers) { foreach (var logger in _loggers) {
@ -234,7 +299,7 @@ public class LogTracer : ILogTracerInternal {
} }
} }
public void Critical(string message) { public void Critical(LogStringHandler message) {
if (_logSeverityLevel <= SeverityLevel.Critical) { if (_logSeverityLevel <= SeverityLevel.Critical) {
var caller = GetCaller(); var caller = GetCaller();
foreach (var logger in _loggers) { foreach (var logger in _loggers) {
@ -243,14 +308,14 @@ public class LogTracer : ILogTracerInternal {
} }
} }
public void Event(string evt, IReadOnlyDictionary<string, double>? metrics) { public void Event(LogStringHandler evt, IReadOnlyDictionary<string, double>? metrics) {
var caller = GetCaller(); var caller = GetCaller();
foreach (var logger in _loggers) { foreach (var logger in _loggers) {
logger.LogEvent(CorrelationId, evt, Tags, metrics, caller); logger.LogEvent(CorrelationId, evt, Tags, metrics, caller);
} }
} }
public void Exception(Exception ex, string message, IReadOnlyDictionary<string, double>? metrics) { public void Exception(Exception ex, LogStringHandler message, IReadOnlyDictionary<string, double>? metrics) {
var caller = GetCaller(); var caller = GetCaller();
foreach (var logger in _loggers) { foreach (var logger in _loggers) {
logger.LogException(CorrelationId, ex, message, Tags, metrics, caller); logger.LogException(CorrelationId, ex, message, Tags, metrics, caller);
@ -264,7 +329,7 @@ public class LogTracer : ILogTracerInternal {
} }
public void Error(Error error) { public void Error(Error error) {
Error(error.ToString()); Error($"{error:Tag:Error}");
} }
} }
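Aside: the API change above replaces plain string parameters with LogStringHandler, a C# interpolated string handler. As an illustration only, the sketch below shows one way such a handler can divert values written as {value:Tag:Name} into a tag dictionary while the rest of the interpolation still builds the message text. The type name TagStringHandler and its members are hypothetical and are not taken from this commit.

using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Text;

// Illustrative sketch, not the OneFuzz implementation: values whose format
// specifier starts with "Tag:" are collected as tags instead of being
// rendered into the message text.
[InterpolatedStringHandler]
public struct TagStringHandler {
    private readonly StringBuilder _builder;
    private Dictionary<string, string>? _tags;

    public TagStringHandler(int literalLength, int formattedCount) {
        _builder = new StringBuilder(literalLength);
        _tags = null;
    }

    // Literal fragments of the interpolated string stay in the message.
    public void AppendLiteral(string s) => _builder.Append(s);

    // A hole such as {id:Tag:MachineId} arrives with format == "Tag:MachineId";
    // its value becomes a tag, everything else is appended to the message.
    public void AppendFormatted<T>(T value, string? format = null) {
        if (format is not null && format.StartsWith("Tag:")) {
            _tags ??= new Dictionary<string, string>();
            _tags[format["Tag:".Length..]] = value?.ToString() ?? string.Empty;
        } else {
            _builder.Append(value?.ToString());
        }
    }

    public IReadOnlyDictionary<string, string>? Tags => _tags;
    public override string ToString() => _builder.ToString();
}

With a handler type in the message parameter position, a call such as log.Warning($"failed to delete temp file {tmpFile:Tag:TempFile} due to {ex:Tag:Exception}") is compiled into handler construction plus AppendLiteral/AppendFormatted calls, which is how the tagged values can end up in the logger's tags dictionary rather than embedded in the rendered message.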

View File

@ -21,7 +21,7 @@ namespace ApiService.TestHooks {
[Function("GetInstanceIdTestHook")] [Function("GetInstanceIdTestHook")]
public async Task<HttpResponseData> GetInstanceId([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/containers/instanceId")] HttpRequestData req) { public async Task<HttpResponseData> GetInstanceId([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/containers/instanceId")] HttpRequestData req) {
_log.Info("Get instance ID"); _log.Info($"Get instance ID");
var instanceId = await _containers.GetInstanceId(); var instanceId = await _containers.GetInstanceId();
var resp = req.CreateResponse(HttpStatusCode.OK); var resp = req.CreateResponse(HttpStatusCode.OK);

View File

@ -8,20 +8,20 @@ using Microsoft.OneFuzz.Service;
#if DEBUG #if DEBUG
namespace ApiService.TestHooks { namespace ApiService.TestHooks {
public class CredsTestHookks { public class CredsTestHooks {
private readonly ILogTracer _log; private readonly ILogTracer _log;
private readonly IConfigOperations _configOps; private readonly IConfigOperations _configOps;
private readonly ICreds _creds; private readonly ICreds _creds;
public CredsTestHookks(ILogTracer log, IConfigOperations configOps, ICreds creds) { public CredsTestHooks(ILogTracer log, IConfigOperations configOps, ICreds creds) {
_log = log.WithTag("TestHooks", nameof(CredsTestHookks)); _log = log.WithTag("TestHooks", nameof(CredsTestHooks));
_configOps = configOps; _configOps = configOps;
_creds = creds; _creds = creds;
} }
[Function("GetSubscriptionTestHook")] [Function("GetSubscriptionTestHook")]
public async Task<HttpResponseData> GetSubscription([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/creds/subscription")] HttpRequestData req) { public async Task<HttpResponseData> GetSubscription([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/creds/subscription")] HttpRequestData req) {
_log.Info("Get subscription"); _log.Info($"Get subscription");
var resp = req.CreateResponse(HttpStatusCode.OK); var resp = req.CreateResponse(HttpStatusCode.OK);
await resp.WriteStringAsync(_creds.GetSubscription().ToString()); await resp.WriteStringAsync(_creds.GetSubscription().ToString());
return resp; return resp;
@ -30,7 +30,7 @@ namespace ApiService.TestHooks {
[Function("GetBaseResourceGroupTestHook")] [Function("GetBaseResourceGroupTestHook")]
public async Task<HttpResponseData> GetBaseResourceGroup([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/creds/baseResourceGroup")] HttpRequestData req) { public async Task<HttpResponseData> GetBaseResourceGroup([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/creds/baseResourceGroup")] HttpRequestData req) {
_log.Info("Get base resource group"); _log.Info($"Get base resource group");
var resp = req.CreateResponse(HttpStatusCode.OK); var resp = req.CreateResponse(HttpStatusCode.OK);
await resp.WriteStringAsync(_creds.GetBaseResourceGroup().ToString()); await resp.WriteStringAsync(_creds.GetBaseResourceGroup().ToString());
return resp; return resp;
@ -38,7 +38,7 @@ namespace ApiService.TestHooks {
[Function("GetInstanceNameTestHook")] [Function("GetInstanceNameTestHook")]
public async Task<HttpResponseData> GetInstanceName([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/creds/instanceName")] HttpRequestData req) { public async Task<HttpResponseData> GetInstanceName([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/creds/instanceName")] HttpRequestData req) {
_log.Info("Get instance name"); _log.Info($"Get instance name");
var resp = req.CreateResponse(HttpStatusCode.OK); var resp = req.CreateResponse(HttpStatusCode.OK);
await resp.WriteStringAsync(_creds.GetInstanceName().ToString()); await resp.WriteStringAsync(_creds.GetInstanceName().ToString());
return resp; return resp;
@ -46,7 +46,7 @@ namespace ApiService.TestHooks {
[Function("GetBaseRegionTestHook")] [Function("GetBaseRegionTestHook")]
public async Task<HttpResponseData> GetBaseRegion([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/creds/baseRegion")] HttpRequestData req) { public async Task<HttpResponseData> GetBaseRegion([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/creds/baseRegion")] HttpRequestData req) {
_log.Info("Get base region"); _log.Info($"Get base region");
var resp = req.CreateResponse(HttpStatusCode.OK); var resp = req.CreateResponse(HttpStatusCode.OK);
var region = await _creds.GetBaseRegion(); var region = await _creds.GetBaseRegion();
await resp.WriteStringAsync(region.String); await resp.WriteStringAsync(region.String);
@ -55,7 +55,7 @@ namespace ApiService.TestHooks {
[Function("GetInstanceUrlTestHook")] [Function("GetInstanceUrlTestHook")]
public async Task<HttpResponseData> GetInstanceUrl([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/creds/instanceUrl")] HttpRequestData req) { public async Task<HttpResponseData> GetInstanceUrl([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/creds/instanceUrl")] HttpRequestData req) {
_log.Info("Get instance url"); _log.Info($"Get instance url");
var resp = req.CreateResponse(HttpStatusCode.OK); var resp = req.CreateResponse(HttpStatusCode.OK);
await resp.WriteStringAsync(_creds.GetInstanceUrl().ToString()); await resp.WriteStringAsync(_creds.GetInstanceUrl().ToString());
return resp; return resp;

View File

@ -23,7 +23,7 @@ namespace ApiService.TestHooks {
[Function("GetDiskNamesTestHook")] [Function("GetDiskNamesTestHook")]
public async Task<HttpResponseData> GetSubscription([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/disks")] HttpRequestData req) { public async Task<HttpResponseData> GetSubscription([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/disks")] HttpRequestData req) {
_log.Info("Get disk names"); _log.Info($"Get disk names");
var resp = req.CreateResponse(HttpStatusCode.OK); var resp = req.CreateResponse(HttpStatusCode.OK);
var diskNames = _diskOps.ListDisks(_creds.GetBaseResourceGroup()).ToList().Select(x => x.Data.Name); var diskNames = _diskOps.ListDisks(_creds.GetBaseResourceGroup()).ToList().Select(x => x.Data.Name);
await resp.WriteAsJsonAsync(diskNames); await resp.WriteAsJsonAsync(diskNames);

View File

@ -22,7 +22,7 @@ namespace ApiService.TestHooks {
[Function("LogEventTestHook")] [Function("LogEventTestHook")]
public async Task<HttpResponseData> LogEvent([HttpTrigger(AuthorizationLevel.Anonymous, "put", Route = "testhooks/events/logEvent")] HttpRequestData req) { public async Task<HttpResponseData> LogEvent([HttpTrigger(AuthorizationLevel.Anonymous, "put", Route = "testhooks/events/logEvent")] HttpRequestData req) {
_log.Info("Log event"); _log.Info($"Log event");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var msg = JsonSerializer.Deserialize<EventMessage>(s!, EntityConverter.GetJsonSerializerOptions()); var msg = JsonSerializer.Deserialize<EventMessage>(s!, EntityConverter.GetJsonSerializerOptions());
@ -34,7 +34,7 @@ namespace ApiService.TestHooks {
[Function("SendEventTestHook")] [Function("SendEventTestHook")]
public async Task<HttpResponseData> SendEvent([HttpTrigger(AuthorizationLevel.Anonymous, "put", Route = "testhooks/events/sendEvent")] HttpRequestData req) { public async Task<HttpResponseData> SendEvent([HttpTrigger(AuthorizationLevel.Anonymous, "put", Route = "testhooks/events/sendEvent")] HttpRequestData req) {
_log.Info("Send event"); _log.Info($"Send event");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var msg = JsonSerializer.Deserialize<EventMessage>(s!, EntityConverter.GetJsonSerializerOptions()); var msg = JsonSerializer.Deserialize<EventMessage>(s!, EntityConverter.GetJsonSerializerOptions());

View File

@ -22,7 +22,7 @@ namespace ApiService.TestHooks {
[Function("GenericExtensionsHook")] [Function("GenericExtensionsHook")]
public async Task<HttpResponseData> GenericExtensions([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/extensions/genericExtensions")] HttpRequestData req) { public async Task<HttpResponseData> GenericExtensions([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/extensions/genericExtensions")] HttpRequestData req) {
_log.Info("Get Generic extensions"); _log.Info($"Get Generic extensions");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
Os os = Enum.Parse<Os>(query["os"]); Os os = Enum.Parse<Os>(query["os"]);

View File

@ -21,11 +21,11 @@ namespace ApiService.TestHooks {
[Function("GetInstanceConfigTestHook")] [Function("GetInstanceConfigTestHook")]
public async Task<HttpResponseData> Get([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/instance-config")] HttpRequestData req) { public async Task<HttpResponseData> Get([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/instance-config")] HttpRequestData req) {
_log.Info("Fetching instance config"); _log.Info($"Fetching instance config");
var config = await _configOps.Fetch(); var config = await _configOps.Fetch();
if (config is null) { if (config is null) {
_log.Error("Instance config is null"); _log.Error($"Instance config is null");
Error err = new(ErrorCode.INVALID_REQUEST, new[] { "Instance config is null" }); Error err = new(ErrorCode.INVALID_REQUEST, new[] { "Instance config is null" });
var resp = req.CreateResponse(HttpStatusCode.InternalServerError); var resp = req.CreateResponse(HttpStatusCode.InternalServerError);
await resp.WriteAsJsonAsync(err); await resp.WriteAsJsonAsync(err);
@ -41,7 +41,7 @@ namespace ApiService.TestHooks {
[Function("PatchInstanceConfigTestHook")] [Function("PatchInstanceConfigTestHook")]
public async Task<HttpResponseData> Patch([HttpTrigger(AuthorizationLevel.Anonymous, "patch", Route = "testhooks/instance-config")] HttpRequestData req) { public async Task<HttpResponseData> Patch([HttpTrigger(AuthorizationLevel.Anonymous, "patch", Route = "testhooks/instance-config")] HttpRequestData req) {
_log.Info("Patch instance config"); _log.Info($"Patch instance config");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var newInstanceConfig = JsonSerializer.Deserialize<InstanceConfig>(s!, EntityConverter.GetJsonSerializerOptions()); var newInstanceConfig = JsonSerializer.Deserialize<InstanceConfig>(s!, EntityConverter.GetJsonSerializerOptions());

View File

@ -20,7 +20,7 @@ namespace ApiService.TestHooks {
[Function("GetPublicNicTestHook")] [Function("GetPublicNicTestHook")]
public async Task<HttpResponseData> GetPublicNic([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/ipOps/publicNic")] HttpRequestData req) { public async Task<HttpResponseData> GetPublicNic([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/ipOps/publicNic")] HttpRequestData req) {
_log.Info("Get public nic"); _log.Info($"Get public nic");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
@ -39,7 +39,7 @@ namespace ApiService.TestHooks {
[Function("GetIpTestHook")] [Function("GetIpTestHook")]
public async Task<HttpResponseData> GetIp([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/ipOps/ip")] HttpRequestData req) { public async Task<HttpResponseData> GetIp([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/ipOps/ip")] HttpRequestData req) {
_log.Info("Get public nic"); _log.Info($"Get public nic");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
@ -60,7 +60,7 @@ namespace ApiService.TestHooks {
[Function("DeletePublicNicTestHook")] [Function("DeletePublicNicTestHook")]
public async Task<HttpResponseData> DeletePublicNic([HttpTrigger(AuthorizationLevel.Anonymous, "delete", Route = "testhooks/ipOps/publicNic")] HttpRequestData req) { public async Task<HttpResponseData> DeletePublicNic([HttpTrigger(AuthorizationLevel.Anonymous, "delete", Route = "testhooks/ipOps/publicNic")] HttpRequestData req) {
_log.Info("Get public nic"); _log.Info($"Get public nic");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
@ -80,7 +80,7 @@ namespace ApiService.TestHooks {
[Function("DeleteIpTestHook")] [Function("DeleteIpTestHook")]
public async Task<HttpResponseData> DeleteIp([HttpTrigger(AuthorizationLevel.Anonymous, "delete", Route = "testhooks/ipOps/ip")] HttpRequestData req) { public async Task<HttpResponseData> DeleteIp([HttpTrigger(AuthorizationLevel.Anonymous, "delete", Route = "testhooks/ipOps/ip")] HttpRequestData req) {
_log.Info("Get public nic"); _log.Info($"Get public nic");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);

View File

@ -23,7 +23,7 @@ namespace ApiService.TestHooks {
[Function("JobTestHook")] [Function("JobTestHook")]
public async Task<HttpResponseData> GetJob([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/jobOps/job")] HttpRequestData req) { public async Task<HttpResponseData> GetJob([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/jobOps/job")] HttpRequestData req) {
_log.Info("Get job info"); _log.Info($"Get job info");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var jobId = Guid.Parse(query["jobId"]); var jobId = Guid.Parse(query["jobId"]);
@ -38,7 +38,7 @@ namespace ApiService.TestHooks {
[Function("SearchExpiredTestHook")] [Function("SearchExpiredTestHook")]
public async Task<HttpResponseData> SearchExpired([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/jobOps/searchExpired")] HttpRequestData req) { public async Task<HttpResponseData> SearchExpired([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/jobOps/searchExpired")] HttpRequestData req) {
_log.Info("Search expired jobs"); _log.Info($"Search expired jobs");
var jobs = await _jobOps.SearchExpired().ToListAsync(); var jobs = await _jobOps.SearchExpired().ToListAsync();
@ -50,7 +50,7 @@ namespace ApiService.TestHooks {
[Function("SearchStateTestHook")] [Function("SearchStateTestHook")]
public async Task<HttpResponseData> SearchState([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/jobOps/searchState")] HttpRequestData req) { public async Task<HttpResponseData> SearchState([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/jobOps/searchState")] HttpRequestData req) {
_log.Info("Search jobs by state"); _log.Info($"Search jobs by state");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var init = UriExtension.GetBool("init", query, false); var init = UriExtension.GetBool("init", query, false);

View File

@ -23,7 +23,7 @@ namespace ApiService.TestHooks {
[Function("MonitorSettingsTestHook")] [Function("MonitorSettingsTestHook")]
public async Task<HttpResponseData> GetMonitorSettings([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/logAnalytics/monitorSettings")] HttpRequestData req) { public async Task<HttpResponseData> GetMonitorSettings([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/logAnalytics/monitorSettings")] HttpRequestData req) {
_log.Info("Get monitor settings"); _log.Info($"Get monitor settings");
var monitorSettings = await _logAnalytics.GetMonitorSettings(); var monitorSettings = await _logAnalytics.GetMonitorSettings();
@ -35,7 +35,7 @@ namespace ApiService.TestHooks {
[Function("WorkspaceIdTestHook")] [Function("WorkspaceIdTestHook")]
public async Task<HttpResponseData> GetWorkspaceId([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/logAnalytics/workspaceId")] HttpRequestData req) { public async Task<HttpResponseData> GetWorkspaceId([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/logAnalytics/workspaceId")] HttpRequestData req) {
_log.Info("Get workspace id"); _log.Info($"Get workspace id");
var workspaceId = _logAnalytics.GetWorkspaceId(); var workspaceId = _logAnalytics.GetWorkspaceId();
var resp = req.CreateResponse(HttpStatusCode.OK); var resp = req.CreateResponse(HttpStatusCode.OK);

View File

@ -25,7 +25,7 @@ namespace ApiService.TestHooks {
[Function("GetByMachineIdTestHook")] [Function("GetByMachineIdTestHook")]
public async Task<HttpResponseData> GetByMachineId([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/nodeOperations/getByMachineId")] HttpRequestData req) { public async Task<HttpResponseData> GetByMachineId([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/nodeOperations/getByMachineId")] HttpRequestData req) {
_log.Info("Get node by machine id"); _log.Info($"Get node by machine id");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var machineId = query["machineId"]; var machineId = query["machineId"];
@ -40,7 +40,7 @@ namespace ApiService.TestHooks {
[Function("CanProcessNewWorkTestHook")] [Function("CanProcessNewWorkTestHook")]
public async Task<HttpResponseData> CanProcessNewWork([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/canProcessNewWork")] HttpRequestData req) { public async Task<HttpResponseData> CanProcessNewWork([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/canProcessNewWork")] HttpRequestData req) {
_log.Info("Can process new work"); _log.Info($"Can process new work");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions()); var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions());
@ -54,7 +54,7 @@ namespace ApiService.TestHooks {
[Function("IsOutdatedTestHook")] [Function("IsOutdatedTestHook")]
public async Task<HttpResponseData> IsOutdated([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/isOutdated")] HttpRequestData req) { public async Task<HttpResponseData> IsOutdated([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/isOutdated")] HttpRequestData req) {
_log.Info("Is outdated"); _log.Info($"Is outdated");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions()); var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions());
@ -68,7 +68,7 @@ namespace ApiService.TestHooks {
[Function("IsTooOldTestHook")] [Function("IsTooOldTestHook")]
public async Task<HttpResponseData> IsTooOld([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/isTooOld")] HttpRequestData req) { public async Task<HttpResponseData> IsTooOld([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/isTooOld")] HttpRequestData req) {
_log.Info("Is too old"); _log.Info($"Is too old");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions()); var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions());
@ -81,7 +81,7 @@ namespace ApiService.TestHooks {
[Function("CouldShrinkScalesetTestHook")] [Function("CouldShrinkScalesetTestHook")]
public async Task<HttpResponseData> CouldShrinkScaleset([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/couldShrinkScaleset")] HttpRequestData req) { public async Task<HttpResponseData> CouldShrinkScaleset([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/couldShrinkScaleset")] HttpRequestData req) {
_log.Info("Could shrink scaleset"); _log.Info($"Could shrink scaleset");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions()); var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions());
@ -95,7 +95,7 @@ namespace ApiService.TestHooks {
[Function("SetHaltTestHook")] [Function("SetHaltTestHook")]
public async Task<HttpResponseData> SetHalt([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/setHalt")] HttpRequestData req) { public async Task<HttpResponseData> SetHalt([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/setHalt")] HttpRequestData req) {
_log.Info("set halt"); _log.Info($"set halt");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions()); var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions());
@ -108,7 +108,7 @@ namespace ApiService.TestHooks {
[Function("SetStateTestHook")] [Function("SetStateTestHook")]
public async Task<HttpResponseData> SetState([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/setState")] HttpRequestData req) { public async Task<HttpResponseData> SetState([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/setState")] HttpRequestData req) {
_log.Info("set state"); _log.Info($"set state");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var state = Enum.Parse<NodeState>(query["state"]); var state = Enum.Parse<NodeState>(query["state"]);
@ -125,7 +125,7 @@ namespace ApiService.TestHooks {
[Function("ToReimageTestHook")] [Function("ToReimageTestHook")]
public async Task<HttpResponseData> ToReimage([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/toReimage")] HttpRequestData req) { public async Task<HttpResponseData> ToReimage([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/toReimage")] HttpRequestData req) {
_log.Info("to reimage"); _log.Info($"to reimage");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var done = UriExtension.GetBool("done", query, false); var done = UriExtension.GetBool("done", query, false);
@ -141,7 +141,7 @@ namespace ApiService.TestHooks {
[Function("SendStopIfFreeTestHook")] [Function("SendStopIfFreeTestHook")]
public async Task<HttpResponseData> SendStopIfFree([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/sendStopIfFree")] HttpRequestData req) { public async Task<HttpResponseData> SendStopIfFree([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/sendStopIfFree")] HttpRequestData req) {
_log.Info("send stop if free"); _log.Info($"send stop if free");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions()); var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions());
@ -155,7 +155,7 @@ namespace ApiService.TestHooks {
[Function("SearchStatesTestHook")] [Function("SearchStatesTestHook")]
public async Task<HttpResponseData> SearchStates([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/searchStates")] HttpRequestData req) { public async Task<HttpResponseData> SearchStates([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/searchStates")] HttpRequestData req) {
_log.Info("search states"); _log.Info($"search states");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
Guid? poolId = UriExtension.GetGuid("poolId", query); Guid? poolId = UriExtension.GetGuid("poolId", query);
@ -181,7 +181,7 @@ namespace ApiService.TestHooks {
[Function("DeleteNodeTestHook")] [Function("DeleteNodeTestHook")]
public async Task<HttpResponseData> DeleteNode([HttpTrigger(AuthorizationLevel.Anonymous, "delete", Route = "testhooks/nodeOperations/node")] HttpRequestData req) { public async Task<HttpResponseData> DeleteNode([HttpTrigger(AuthorizationLevel.Anonymous, "delete", Route = "testhooks/nodeOperations/node")] HttpRequestData req) {
_log.Info("delete node"); _log.Info($"delete node");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions()); var node = JsonSerializer.Deserialize<Node>(s!, EntityConverter.GetJsonSerializerOptions());
@ -193,7 +193,7 @@ namespace ApiService.TestHooks {
[Function("ReimageLongLivedNodesTestHook")] [Function("ReimageLongLivedNodesTestHook")]
public async Task<HttpResponseData> ReimageLongLivedNodes([HttpTrigger(AuthorizationLevel.Anonymous, "patch", Route = "testhooks/nodeOperations/reimageLongLivedNodes")] HttpRequestData req) { public async Task<HttpResponseData> ReimageLongLivedNodes([HttpTrigger(AuthorizationLevel.Anonymous, "patch", Route = "testhooks/nodeOperations/reimageLongLivedNodes")] HttpRequestData req) {
_log.Info("reimage long lived nodes"); _log.Info($"reimage long lived nodes");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var r = _nodeOps.ReimageLongLivedNodes(Guid.Parse(query["scaleSetId"])); var r = _nodeOps.ReimageLongLivedNodes(Guid.Parse(query["scaleSetId"]));
@ -205,7 +205,7 @@ namespace ApiService.TestHooks {
[Function("CreateTestHook")] [Function("CreateTestHook")]
public async Task<HttpResponseData> CreateNode([HttpTrigger(AuthorizationLevel.Anonymous, "put", Route = "testhooks/nodeOperations/node")] HttpRequestData req) { public async Task<HttpResponseData> CreateNode([HttpTrigger(AuthorizationLevel.Anonymous, "put", Route = "testhooks/nodeOperations/node")] HttpRequestData req) {
_log.Info("create node"); _log.Info($"create node");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
@ -232,7 +232,7 @@ namespace ApiService.TestHooks {
[Function("GetDeadNodesTestHook")] [Function("GetDeadNodesTestHook")]
public async Task<HttpResponseData> GetDeadNodes([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/getDeadNodes")] HttpRequestData req) { public async Task<HttpResponseData> GetDeadNodes([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/nodeOperations/getDeadNodes")] HttpRequestData req) {
_log.Info("get dead nodes"); _log.Info($"get dead nodes");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
@ -249,7 +249,7 @@ namespace ApiService.TestHooks {
[Function("MarkTasksStoppedEarly")] [Function("MarkTasksStoppedEarly")]
public async Task<HttpResponseData> MarkTasksStoppedEarly([HttpTrigger(AuthorizationLevel.Anonymous, "patch", Route = "testhooks/nodeOperations/markTasksStoppedEarly")] HttpRequestData req) { public async Task<HttpResponseData> MarkTasksStoppedEarly([HttpTrigger(AuthorizationLevel.Anonymous, "patch", Route = "testhooks/nodeOperations/markTasksStoppedEarly")] HttpRequestData req) {
_log.Info("mark tasks stopped early"); _log.Info($"mark tasks stopped early");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var markTasks = JsonSerializer.Deserialize<MarkTasks>(s!, EntityConverter.GetJsonSerializerOptions()); var markTasks = JsonSerializer.Deserialize<MarkTasks>(s!, EntityConverter.GetJsonSerializerOptions());

View File

@ -23,7 +23,7 @@ namespace ApiService.TestHooks {
[Function("NewFilesTestHook")] [Function("NewFilesTestHook")]
public async Task<HttpResponseData> NewFiles([HttpTrigger(AuthorizationLevel.Anonymous, "put", Route = "testhooks/notificationOperations/newFiles")] HttpRequestData req) { public async Task<HttpResponseData> NewFiles([HttpTrigger(AuthorizationLevel.Anonymous, "put", Route = "testhooks/notificationOperations/newFiles")] HttpRequestData req) {
_log.Info("new files"); _log.Info($"new files");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var container = query["container"]; var container = query["container"];
@ -37,7 +37,7 @@ namespace ApiService.TestHooks {
[Function("GetNotificationsTestHook")] [Function("GetNotificationsTestHook")]
public async Task<HttpResponseData> GetNotifications([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/notificationOperations/getNotifications")] HttpRequestData req) { public async Task<HttpResponseData> GetNotifications([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/notificationOperations/getNotifications")] HttpRequestData req) {
_log.Info("get notifications"); _log.Info($"get notifications");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
@ -54,7 +54,7 @@ namespace ApiService.TestHooks {
[Function("GetQueueTasksTestHook")] [Function("GetQueueTasksTestHook")]
public async Task<HttpResponseData> GetQueueTasksTestHook([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/notificationOperations/getQueueTasks")] HttpRequestData req) { public async Task<HttpResponseData> GetQueueTasksTestHook([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/notificationOperations/getQueueTasks")] HttpRequestData req) {
_log.Info("get queue tasks"); _log.Info($"get queue tasks");
var queueuTasks = _notificationOps.GetQueueTasks(); var queueuTasks = _notificationOps.GetQueueTasks();
var json = JsonSerializer.Serialize(await queueuTasks.ToListAsync(), EntityConverter.GetJsonSerializerOptions()); var json = JsonSerializer.Serialize(await queueuTasks.ToListAsync(), EntityConverter.GetJsonSerializerOptions());
@ -66,7 +66,7 @@ namespace ApiService.TestHooks {
[Function("GetRegressionReportTaskTestHook")] [Function("GetRegressionReportTaskTestHook")]
public async Task<HttpResponseData> GetRegressionReportTask([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/notificationOperations/getRegressionReportTask")] HttpRequestData req) { public async Task<HttpResponseData> GetRegressionReportTask([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "testhooks/notificationOperations/getRegressionReportTask")] HttpRequestData req) {
_log.Info("get regression report task"); _log.Info($"get regression report task");
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var report = JsonSerializer.Deserialize<RegressionReport>(s!, EntityConverter.GetJsonSerializerOptions()); var report = JsonSerializer.Deserialize<RegressionReport>(s!, EntityConverter.GetJsonSerializerOptions());

View File

@ -23,7 +23,7 @@ namespace ApiService.TestHooks {
[Function("GetNsgTestHook")] [Function("GetNsgTestHook")]
public async Task<HttpResponseData> GetNsg([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/nsgOperations/nsg")] HttpRequestData req) { public async Task<HttpResponseData> GetNsg([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/nsgOperations/nsg")] HttpRequestData req) {
_log.Info("get nsg"); _log.Info($"get nsg");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var nsg = await _nsgOperations.GetNsg(query["name"]); var nsg = await _nsgOperations.GetNsg(query["name"]);
@ -41,7 +41,7 @@ namespace ApiService.TestHooks {
[Function("ListNsgsTestHook")] [Function("ListNsgsTestHook")]
public async Task<HttpResponseData> ListNsgs([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/nsgOperations/listNsgs")] HttpRequestData req) { public async Task<HttpResponseData> ListNsgs([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/nsgOperations/listNsgs")] HttpRequestData req) {
_log.Info("list nsgs"); _log.Info($"list nsgs");
var nsgs = await _nsgOperations.ListNsgs().ToListAsync(); var nsgs = await _nsgOperations.ListNsgs().ToListAsync();
@ -53,7 +53,7 @@ namespace ApiService.TestHooks {
[Function("DeleteNsgTestHook")] [Function("DeleteNsgTestHook")]
public async Task<HttpResponseData> DeleteNsg([HttpTrigger(AuthorizationLevel.Anonymous, "delete", Route = "testhooks/nsgOperations/nsg")] HttpRequestData req) { public async Task<HttpResponseData> DeleteNsg([HttpTrigger(AuthorizationLevel.Anonymous, "delete", Route = "testhooks/nsgOperations/nsg")] HttpRequestData req) {
_log.Info("delete nsgs"); _log.Info($"delete nsgs");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var name = query["name"]; var name = query["name"];

View File

@ -22,7 +22,7 @@ namespace ApiService.TestHooks {
[Function("GetPoolTestHook")] [Function("GetPoolTestHook")]
public async Task<HttpResponseData> GetPool([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/poolOperations/pool")] HttpRequestData req) { public async Task<HttpResponseData> GetPool([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/poolOperations/pool")] HttpRequestData req) {
_log.Info("get pool"); _log.Info($"get pool");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var poolRes = await _poolOps.GetByName(PoolName.Parse(query["name"])); var poolRes = await _poolOps.GetByName(PoolName.Parse(query["name"]));

View File

@ -22,7 +22,7 @@ namespace ApiService.TestHooks {
[Function("SearchForwardTestHook")] [Function("SearchForwardTestHook")]
public async Task<HttpResponseData> SearchForward([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/proxyForwardOperations/search")] HttpRequestData req) { public async Task<HttpResponseData> SearchForward([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/proxyForwardOperations/search")] HttpRequestData req) {
_log.Info("search proxy forward"); _log.Info($"search proxy forward");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);

View File

@ -28,23 +28,23 @@ public class TestHooks {
[Function("_Info")] [Function("_Info")]
public async Task<HttpResponseData> Info([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/info")] HttpRequestData req) { public async Task<HttpResponseData> Info([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/info")] HttpRequestData req) {
_log.Info("Creating function info response"); _log.Info($"Creating function info response");
var response = req.CreateResponse(); var response = req.CreateResponse();
FunctionInfo info = new( FunctionInfo info = new(
$"{_config.OneFuzzInstanceName}", $"{_config.OneFuzzInstanceName}",
$"{_config.OneFuzzResourceGroup}", $"{_config.OneFuzzResourceGroup}",
Environment.GetEnvironmentVariable("WEBSITE_SLOT_NAME")); Environment.GetEnvironmentVariable("WEBSITE_SLOT_NAME"));
_log.Info("Returning function info"); _log.Info($"Returning function info");
await response.WriteAsJsonAsync(info); await response.WriteAsJsonAsync(info);
_log.Info("Returned function info"); _log.Info($"Returned function info");
return response; return response;
} }
[Function("GetKeyvaultAddress")] [Function("GetKeyvaultAddress")]
public async Task<HttpResponseData> GetKeyVaultAddress([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/secrets/keyvaultaddress")] HttpRequestData req) { public async Task<HttpResponseData> GetKeyVaultAddress([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/secrets/keyvaultaddress")] HttpRequestData req) {
_log.Info("Getting keyvault address"); _log.Info($"Getting keyvault address");
var addr = _secretOps.GetKeyvaultAddress(); var addr = _secretOps.GetKeyvaultAddress();
var resp = req.CreateResponse(HttpStatusCode.OK); var resp = req.CreateResponse(HttpStatusCode.OK);
await resp.WriteAsJsonAsync(addr); await resp.WriteAsJsonAsync(addr);
@ -56,7 +56,7 @@ public class TestHooks {
var s = await req.ReadAsStringAsync(); var s = await req.ReadAsStringAsync();
var secretData = JsonSerializer.Deserialize<SecretData<string>>(s!, EntityConverter.GetJsonSerializerOptions()); var secretData = JsonSerializer.Deserialize<SecretData<string>>(s!, EntityConverter.GetJsonSerializerOptions());
if (secretData is null) { if (secretData is null) {
_log.Error("Secret data is null"); _log.Error($"Secret data is null");
return req.CreateResponse(HttpStatusCode.BadRequest); return req.CreateResponse(HttpStatusCode.BadRequest);
} else { } else {
_log.Info($"Saving secret data in the keyvault"); _log.Info($"Saving secret data in the keyvault");

View File

@ -23,7 +23,7 @@ namespace ApiService.TestHooks {
[Function("ListInstanceIdsTesHook")] [Function("ListInstanceIdsTesHook")]
public async Task<HttpResponseData> ListInstanceIds([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/vmssOperations/listInstanceIds")] HttpRequestData req) { public async Task<HttpResponseData> ListInstanceIds([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/vmssOperations/listInstanceIds")] HttpRequestData req) {
_log.Info("list instance ids"); _log.Info($"list instance ids");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var name = UriExtension.GetGuid("name", query) ?? throw new Exception("name must be set"); var name = UriExtension.GetGuid("name", query) ?? throw new Exception("name must be set");
var ids = await _vmssOps.ListInstanceIds(name); var ids = await _vmssOps.ListInstanceIds(name);
@ -36,7 +36,7 @@ namespace ApiService.TestHooks {
[Function("GetInstanceIdsTesHook")] [Function("GetInstanceIdsTesHook")]
public async Task<HttpResponseData> GetInstanceId([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/vmssOperations/getInstanceId")] HttpRequestData req) { public async Task<HttpResponseData> GetInstanceId([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "testhooks/vmssOperations/getInstanceId")] HttpRequestData req) {
_log.Info("list instance ids"); _log.Info($"list instance ids");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var name = UriExtension.GetGuid("name", query) ?? throw new Exception("name must be set"); var name = UriExtension.GetGuid("name", query) ?? throw new Exception("name must be set");
var vmId = UriExtension.GetGuid("vmId", query) ?? throw new Exception("vmId must be set"); var vmId = UriExtension.GetGuid("vmId", query) ?? throw new Exception("vmId must be set");
@ -50,7 +50,7 @@ namespace ApiService.TestHooks {
[Function("UpdateScaleInProtectionTestHook")] [Function("UpdateScaleInProtectionTestHook")]
public async Task<HttpResponseData> UpdateScaleInProtection([HttpTrigger(AuthorizationLevel.Anonymous, "put", Route = "testhooks/vmssOperations/updateScaleInProtection")] HttpRequestData req) { public async Task<HttpResponseData> UpdateScaleInProtection([HttpTrigger(AuthorizationLevel.Anonymous, "put", Route = "testhooks/vmssOperations/updateScaleInProtection")] HttpRequestData req) {
_log.Info("list instance ids"); _log.Info($"list instance ids");
var query = UriExtension.GetQueryComponents(req.Url); var query = UriExtension.GetQueryComponents(req.Url);
var name = UriExtension.GetGuid("name", query) ?? throw new Exception("name must be set"); var name = UriExtension.GetGuid("name", query) ?? throw new Exception("name must be set");
var vmId = UriExtension.GetGuid("vmId", query) ?? throw new Exception("vmId must be set"); var vmId = UriExtension.GetGuid("vmId", query) ?? throw new Exception("vmId must be set");

View File

@ -83,11 +83,11 @@ public class UserCredentials : IUserCredentials {
return OneFuzzResult<UserInfo>.Ok(new(applicationId, objectId, upn)); return OneFuzzResult<UserInfo>.Ok(new(applicationId, objectId, upn));
} else { } else {
var tenantsStr = allowedTenants.OkV is null ? "null" : String.Join(';', allowedTenants.OkV!); var tenantsStr = allowedTenants.OkV is null ? "null" : String.Join(';', allowedTenants.OkV!);
_log.Error($"issuer not from allowed tenant. issuer: {token.Issuer} - tenants: {tenantsStr}"); _log.Error($"issuer not from allowed tenant. issuer: {token.Issuer:Tag:Issuer} - tenants: {tenantsStr:Tag:Tenants}");
return OneFuzzResult<UserInfo>.Error(ErrorCode.INVALID_REQUEST, new[] { "unauthorized AAD issuer" }); return OneFuzzResult<UserInfo>.Error(ErrorCode.INVALID_REQUEST, new[] { "unauthorized AAD issuer" });
} }
} else { } else {
_log.Error("Failed to get allowed tenants"); _log.Error($"Failed to get allowed tenants due to {allowedTenants.ErrorV:Tag:Error}");
return OneFuzzResult<UserInfo>.Error(allowedTenants.ErrorV); return OneFuzzResult<UserInfo>.Error(allowedTenants.ErrorV);
} }
} }

View File

@ -38,7 +38,7 @@ public class Auth {
File.Delete(tmpFile); File.Delete(tmpFile);
} catch (Exception ex) { } catch (Exception ex) {
//bad but not worth the failure //bad but not worth the failure
log.Warning($"failed to delete temp file {tmpFile} due to {ex}"); log.Warning($"failed to delete temp file {tmpFile:Tag:TempFile} due to {ex:Tag:Exception}");
} }
tmpFile = tmpFile + ".key"; tmpFile = tmpFile + ".key";
var startInfo = SshKeyGenProcConfig(tmpFile); var startInfo = SshKeyGenProcConfig(tmpFile);
@ -56,13 +56,13 @@ public class Auth {
File.Delete(tmpFile); File.Delete(tmpFile);
} catch (Exception ex) { } catch (Exception ex) {
//bad but not worth failing //bad but not worth failing
log.Warning($"failed to delete temp file {tmpFile} due to {ex}"); log.Warning($"failed to delete temp file {tmpFile:Tag:TempFile} due to {ex:Tag:Exception}");
} }
try { try {
File.Delete(tmpFilePub); File.Delete(tmpFilePub);
} catch (Exception ex) { } catch (Exception ex) {
//bad but not worth failing //bad but not worth failing
log.Warning($"failed to delete temp file {tmpFilePub} due to {ex}"); log.Warning($"failed to delete temp file {tmpFilePub:Tag:TempFile} due to {ex:Tag:Exception}");
} }
return (priv, pub.Trim()); return (priv, pub.Trim());
} else { } else {

View File

@ -75,7 +75,7 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
var r = await Insert(entry); var r = await Insert(entry);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to save auto-scale record for scaleset ID: {scalesetId}, minAmount: {minAmount}, maxAmount: {maxAmount}, defaultAmount: {defaultAmount}, scaleOutAmount: {scaleOutAmount}, scaleOutCooldown: {scaleOutCooldown}, scaleInAmount: {scaleInAmount}, scaleInCooldown: {scaleInCooldown}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to save auto-scale record {scalesetId:Tag:ScalesetId} {minAmount:Tag:MinAmount} {maxAmount:Tag:MaxAmount} {defaultAmount:Tag:DefaultAmount} {scaleOutAmount:Tag:ScaleoutAmount} {scaleOutCooldown:Tag:ScaleoutCooldown} {scaleInAmount:Tag:ScaleinAmount} {scaleInCooldown:Tag:ScaleInCooldown}");
} }
return entry; return entry;
} }
@ -85,13 +85,13 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
var autoscale = await GetEntityAsync(scalesetId.ToString(), scalesetId.ToString()); var autoscale = await GetEntityAsync(scalesetId.ToString(), scalesetId.ToString());
return autoscale; return autoscale;
} catch (Exception ex) { } catch (Exception ex) {
_logTracer.Exception(ex, "Failed to get auto-scale entity"); _logTracer.Exception(ex, $"Failed to get auto-scale entity {scalesetId:Tag:ScalesetId}");
return null; return null;
} }
} }
public async Async.Task<OneFuzzResult<AutoscaleProfile>> GetAutoScaleProfile(Guid scalesetId) { public async Async.Task<OneFuzzResult<AutoscaleProfile>> GetAutoScaleProfile(Guid scalesetId) {
_logTracer.Info($"getting scaleset for existing auto-scale resources {scalesetId}"); _logTracer.Info($"getting scaleset for existing auto-scale resources {scalesetId:Tag:ScalesetId}");
var settings = _context.Creds.GetResourceGroupResource().GetAutoscaleSettings(); var settings = _context.Creds.GetResourceGroupResource().GetAutoscaleSettings();
if (settings is null) { if (settings is null) {
return OneFuzzResult<AutoscaleProfile>.Error(ErrorCode.INVALID_CONFIGURATION, $"could not find any auto-scale settings for the resource group"); return OneFuzzResult<AutoscaleProfile>.Error(ErrorCode.INVALID_CONFIGURATION, $"could not find any auto-scale settings for the resource group");
@ -103,7 +103,7 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
return OneFuzzResult<AutoscaleProfile>.Error(ErrorCode.INVALID_CONFIGURATION, $"found {setting.Data.Profiles.Count} auto-scale profiles for {scalesetId}"); return OneFuzzResult<AutoscaleProfile>.Error(ErrorCode.INVALID_CONFIGURATION, $"found {setting.Data.Profiles.Count} auto-scale profiles for {scalesetId}");
} else { } else {
if (setting.Data.Profiles.Count != 1) { if (setting.Data.Profiles.Count != 1) {
_logTracer.Warning($"Found more than one autoscaling profile for scaleset {scalesetId}"); _logTracer.Warning($"Found more than one autoscaling profile for scaleset {scalesetId:Tag:ScalesetId}");
} }
return OneFuzzResult<AutoscaleProfile>.Ok(setting.Data.Profiles.First()); return OneFuzzResult<AutoscaleProfile>.Ok(setting.Data.Profiles.First());
} }
@ -114,7 +114,7 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
} }
public async Async.Task<OneFuzzResultVoid> AddAutoScaleToVmss(Guid vmss, AutoscaleProfile autoScaleProfile) { public async Async.Task<OneFuzzResultVoid> AddAutoScaleToVmss(Guid vmss, AutoscaleProfile autoScaleProfile) {
_logTracer.Info($"Checking scaleset {vmss} for existing auto scale resource"); _logTracer.Info($"Checking scaleset {vmss:Tag:ScalesetId} for existing auto scale resource");
var existingAutoScaleResource = GetAutoscaleSettings(vmss); var existingAutoScaleResource = GetAutoscaleSettings(vmss);
@ -131,7 +131,7 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
return OneFuzzResultVoid.Error(autoScaleResource.ErrorV); return OneFuzzResultVoid.Error(autoScaleResource.ErrorV);
} }
var workspaceId = _context.LogAnalytics.GetWorkspaceId().ToString(); var workspaceId = _context.LogAnalytics.GetWorkspaceId().ToString();
_logTracer.Info($"Setting up diagnostics for id: {autoScaleResource.OkV.Id!} with name: {autoScaleResource.OkV.Data.Name} and workspace id: {workspaceId}"); _logTracer.Info($"Setting up diagnostics for {autoScaleResource.OkV.Id!:Tag:AutoscaleResourceId} - {autoScaleResource.OkV.Data.Name:Tag:AutoscaleResourceName} - {workspaceId:Tag:WorkspaceId}");
var diagnosticsResource = await SetupAutoScaleDiagnostics(autoScaleResource.OkV, workspaceId); var diagnosticsResource = await SetupAutoScaleDiagnostics(autoScaleResource.OkV, workspaceId);
if (!diagnosticsResource.IsOk) { if (!diagnosticsResource.IsOk) {
@ -141,7 +141,7 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
return OneFuzzResultVoid.Ok; return OneFuzzResultVoid.Ok;
} }
private async Async.Task<OneFuzzResult<AutoscaleSettingResource>> CreateAutoScaleResourceFor(Guid resourceId, Region location, AutoscaleProfile profile) { private async Async.Task<OneFuzzResult<AutoscaleSettingResource>> CreateAutoScaleResourceFor(Guid resourceId, Region location, AutoscaleProfile profile) {
_logTracer.Info($"Creating auto-scale resource for: {resourceId}"); _logTracer.Info($"Creating auto-scale resource for: {resourceId:Tag:AutoscaleResourceId}");
var resourceGroup = _context.Creds.GetBaseResourceGroup(); var resourceGroup = _context.Creds.GetBaseResourceGroup();
var subscription = _context.Creds.GetSubscription(); var subscription = _context.Creds.GetSubscription();
@ -157,7 +157,7 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
var autoScaleResource = await autoScaleSettings.CreateOrUpdateAsync(WaitUntil.Started, Guid.NewGuid().ToString(), parameters); var autoScaleResource = await autoScaleSettings.CreateOrUpdateAsync(WaitUntil.Started, Guid.NewGuid().ToString(), parameters);
if (autoScaleResource != null && autoScaleResource.HasValue) { if (autoScaleResource != null && autoScaleResource.HasValue) {
_logTracer.Info($"Successfully created auto scale resource {autoScaleResource.Value.Id} for {resourceId}"); _logTracer.Info($"Successfully created auto scale resource {autoScaleResource.Value.Id:Tag:AutoscaleResourceId} for {resourceId:Tag:ResourceId}");
return OneFuzzResult<AutoscaleSettingResource>.Ok(autoScaleResource.Value); return OneFuzzResult<AutoscaleSettingResource>.Ok(autoScaleResource.Value);
} }
@ -168,7 +168,7 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
} catch (RequestFailedException ex) when (ex.Status == 409 && ex.Message.Contains("\"code\":\"SettingAlreadyExists\"")) { } catch (RequestFailedException ex) when (ex.Status == 409 && ex.Message.Contains("\"code\":\"SettingAlreadyExists\"")) {
var existingAutoScaleResource = GetAutoscaleSettings(resourceId); var existingAutoScaleResource = GetAutoscaleSettings(resourceId);
if (existingAutoScaleResource.IsOk) { if (existingAutoScaleResource.IsOk) {
_logTracer.Info($"Successfully created auto scale resource {existingAutoScaleResource.OkV!.Data.Id} for {resourceId}"); _logTracer.Info($"Successfully created auto scale resource {existingAutoScaleResource.OkV!.Data.Id:Tag:AutoscaleResourceId} for {resourceId:Tag:ResourceId}");
return OneFuzzResult<AutoscaleSettingResource>.Ok(existingAutoScaleResource.OkV!); return OneFuzzResult<AutoscaleSettingResource>.Ok(existingAutoScaleResource.OkV!);
} else { } else {
return existingAutoScaleResource.ErrorV; return existingAutoScaleResource.ErrorV;
@ -287,7 +287,7 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
} }
public OneFuzzResult<AutoscaleSettingResource?> GetAutoscaleSettings(Guid vmss) { public OneFuzzResult<AutoscaleSettingResource?> GetAutoscaleSettings(Guid vmss) {
_logTracer.Info($"Checking scaleset {vmss} for existing auto scale resource"); _logTracer.Info($"Checking scaleset {vmss:Tag:ScalesetId} for existing auto scale resource");
try { try {
var autoscale = _context.Creds.GetResourceGroupResource().GetAutoscaleSettings() var autoscale = _context.Creds.GetResourceGroupResource().GetAutoscaleSettings()
.ToEnumerable() .ToEnumerable()
@ -295,7 +295,7 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
.FirstOrDefault(); .FirstOrDefault();
if (autoscale != null) { if (autoscale != null) {
_logTracer.Info($"Found autoscale settings for {vmss}"); _logTracer.Info($"Found autoscale settings for {vmss:Tag:ScalesetId}");
return OneFuzzResult.Ok<AutoscaleSettingResource?>(autoscale); return OneFuzzResult.Ok<AutoscaleSettingResource?>(autoscale);
} }
@ -307,7 +307,7 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
} }
public async Task<OneFuzzResultVoid> UpdateAutoscale(AutoscaleSettingData autoscale) { public async Task<OneFuzzResultVoid> UpdateAutoscale(AutoscaleSettingData autoscale) {
_logTracer.Info($"Updating auto scale resource: {autoscale.Name}"); _logTracer.Info($"Updating auto scale resource: {autoscale.Name:Tag:AutoscaleSettingName}");
try { try {
var newResource = await _context.Creds.GetResourceGroupResource().GetAutoscaleSettings().CreateOrUpdateAsync( var newResource = await _context.Creds.GetResourceGroupResource().GetAutoscaleSettings().CreateOrUpdateAsync(
@ -316,7 +316,7 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
autoscale autoscale
); );
_logTracer.Info($"Successfully updated auto scale resource: {autoscale.Name}"); _logTracer.Info($"Successfully updated auto scale resource: {autoscale.Name:Tag:AutoscaleSettingName}");
} catch (RequestFailedException ex) { } catch (RequestFailedException ex) {
_logTracer.Exception(ex); _logTracer.Exception(ex);
return OneFuzzResultVoid.Error( return OneFuzzResultVoid.Error(
@ -375,7 +375,7 @@ public class AutoScaleOperations : Orm<AutoScale>, IAutoScaleOperations {
var r = await Replace(entry); var r = await Replace(entry);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to replace auto-scale record for scaleset ID: {scalesetId}, minAmount: {minAmount}, maxAmount: {maxAmount}, defaultAmount: {defaultAmount}, scaleOutAmount: {scaleOutAmount}, scaleOutCooldown: {scaleOutCooldown}, scaleInAmount: {scaleInAmount}, scaleInCooldown: {scaleInCooldown}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to replace auto-scale record {scalesetId:Tag:ScalesetId} {minAmount:Tag:MinAmount} {maxAmount:Tag:MaxAmount} {defaultAmount:Tag:DefaultAmount} {scaleOutAmount:Tag:ScaleoutAmount} {scaleOutCooldown:Tag:ScaleoutCooldown} {scaleInAmount:Tag:ScaleinAmount} {scaleInCooldown:Tag:ScaleinCooldown}");
} }
return entry; return entry;
} }

View File

@ -56,7 +56,7 @@ public class Config : IConfig {
} }
if (job.Config.Logs == null) { if (job.Config.Logs == null) {
_logTracer.Warning($"Missing log container: job_id {job.JobId}, task_id {task.TaskId}"); _logTracer.Warning($"Missing log container {job.JobId:Tag:JobId} - {task.TaskId:Tag:TaskId}");
return null; return null;
} }
@ -283,7 +283,7 @@ public class Config : IConfig {
if (definition.Features.Contains(TaskFeature.SupervisorExe) && config.Task.SupervisorExe == null) { if (definition.Features.Contains(TaskFeature.SupervisorExe) && config.Task.SupervisorExe == null) {
var err = "missing supervisor_exe"; var err = "missing supervisor_exe";
_logTracer.Error(err); _logTracer.Error($"{err}");
return ResultVoid<TaskConfigError>.Error(new TaskConfigError(err)); return ResultVoid<TaskConfigError>.Error(new TaskConfigError(err));
} }
@ -300,10 +300,8 @@ public class Config : IConfig {
} }
if (!CheckVal(definition.Vm.Compare, definition.Vm.Value, config.Pool!.Count)) { if (!CheckVal(definition.Vm.Compare, definition.Vm.Value, config.Pool!.Count)) {
var err = _logTracer.Error($"invalid vm count: expected {definition.Vm.Compare:Tag:Comparison} {definition.Vm.Value:Tag:Expected} {config.Pool.Count:Tag:Actual}");
$"invalid vm count: expected {definition.Vm.Compare} {definition.Vm.Value}, got {config.Pool.Count}"; return ResultVoid<TaskConfigError>.Error(new TaskConfigError($"invalid vm count: expected {definition.Vm.Compare} {definition.Vm.Value}, actual {config.Pool.Count}"));
_logTracer.Error(err);
return ResultVoid<TaskConfigError>.Error(new TaskConfigError(err));
} }
var pool = await _context.PoolOperations.GetByName(config.Pool.PoolName); var pool = await _context.PoolOperations.GetByName(config.Pool.PoolName);
@ -329,10 +327,8 @@ public class Config : IConfig {
if (config.Task.GeneratorExe.StartsWith(toolPath)) { if (config.Task.GeneratorExe.StartsWith(toolPath)) {
var generator = config.Task.GeneratorExe.Replace(toolPath, ""); var generator = config.Task.GeneratorExe.Replace(toolPath, "");
if (!await _containers.BlobExists(container.Name, generator, StorageType.Corpus)) { if (!await _containers.BlobExists(container.Name, generator, StorageType.Corpus)) {
var err = _logTracer.Error($"{config.Task.GeneratorExe:Tag:GeneratorExe} does not exist in the tools container `{container.Name:Tag:Container}`");
$"generator_exe `{config.Task.GeneratorExe}` does not exist in the tools container `{container.Name}`"; return ResultVoid<TaskConfigError>.Error(new TaskConfigError($"generator_exe `{config.Task.GeneratorExe}` does not exist in the tools container `{container.Name}`"));
_logTracer.Error(err);
return ResultVoid<TaskConfigError>.Error(new TaskConfigError(err));
} }
} }
} }
@ -341,7 +337,7 @@ public class Config : IConfig {
if (definition.Features.Contains(TaskFeature.StatsFile)) { if (definition.Features.Contains(TaskFeature.StatsFile)) {
if (config.Task.StatsFile != null && config.Task.StatsFormat == null) { if (config.Task.StatsFile != null && config.Task.StatsFormat == null) {
var err2 = "using a stats_file requires a stats_format"; var err2 = "using a stats_file requires a stats_format";
_logTracer.Error(err2); _logTracer.Error($"{err2}");
return ResultVoid<TaskConfigError>.Error(new TaskConfigError(err2)); return ResultVoid<TaskConfigError>.Error(new TaskConfigError(err2));
} }
} }
@ -376,10 +372,7 @@ public class Config : IConfig {
var container = config.Containers!.FirstOrDefault(x => x.Type == ContainerType.Setup); var container = config.Containers!.FirstOrDefault(x => x.Type == ContainerType.Setup);
if (container != null) { if (container != null) {
if (!await _containers.BlobExists(container.Name, config.Task.TargetExe, StorageType.Corpus)) { if (!await _containers.BlobExists(container.Name, config.Task.TargetExe, StorageType.Corpus)) {
var err = _logTracer.Warning($"target_exe `{config.Task.TargetExe:Tag:TargetExe}` does not exist in the setup container `{container.Name:Tag:Container}`");
$"target_exe `{config.Task.TargetExe}` does not exist in the setup container `{container.Name}`";
_logTracer.Warning(err);
} }
} }

View File

@ -106,12 +106,12 @@ public class Containers : IContainers {
try { try {
var r = await cc.CreateAsync(metadata: metadata); var r = await cc.CreateAsync(metadata: metadata);
if (r.GetRawResponse().IsError) { if (r.GetRawResponse().IsError) {
_log.Error($"failed to create blob {containerName} due to {r.GetRawResponse().ReasonPhrase}"); _log.Error($"failed to create blob {containerName:Tag:ContainerName} due to {r.GetRawResponse().ReasonPhrase:Tag:Error}");
} }
} catch (RequestFailedException ex) when (ex.ErrorCode == "ContainerAlreadyExists") { } catch (RequestFailedException ex) when (ex.ErrorCode == "ContainerAlreadyExists") {
// note: resource exists error happens during creation if the container // note: resource exists error happens during creation if the container
// is being deleted // is being deleted
_log.Exception(ex, $"unable to create container. account: {account} container: {container} metadata: {metadata}"); _log.Exception(ex, $"unable to create container. {account:Tag:Account} {container:Tag:Container} {metadata:Tag:Metadata}");
return null; return null;
} }
@ -166,7 +166,11 @@ public class Containers : IContainers {
public async Async.Task SaveBlob(Container container, string name, string data, StorageType storageType) { public async Async.Task SaveBlob(Container container, string name, string data, StorageType storageType) {
var client = await FindContainer(container, storageType) ?? throw new Exception($"unable to find container: {container} - {storageType}"); var client = await FindContainer(container, storageType) ?? throw new Exception($"unable to find container: {container} - {storageType}");
_ = await client.GetBlobClient(name).UploadAsync(new BinaryData(data), overwrite: true); var blobSave = await client.GetBlobClient(name).UploadAsync(new BinaryData(data), overwrite: true);
var r = blobSave.GetRawResponse();
if (r.IsError) {
throw new Exception($"failed to save blob {name} due to {r.ReasonPhrase}");
}
} }
public virtual Async.Task<Guid> GetInstanceId() => _getInstanceId.Value; public virtual Async.Task<Guid> GetInstanceId() => _getInstanceId.Value;
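
Several hunks in this commit (SaveBlob above, DeleteNic, DeleteIp, and the queue operations further down) repeat the same pattern of calling GetRawResponse() on an Azure SDK result and branching on IsError. A small extension method could centralize that check; the sketch below is a hypothetical helper under that assumption, not code from the repository.

// Hypothetical helper, not part of OneFuzz: fail fast when an Azure SDK call
// comes back with an error raw response.
using System;
using Azure;

public static class ResponseExtensions {
    public static Response<T> EnsureSuccess<T>(this Response<T> response, string operation) {
        var raw = response.GetRawResponse();
        if (raw.IsError) {
            // Status and ReasonPhrase mirror what the log statements in this commit report.
            throw new InvalidOperationException($"{operation} failed: {raw.Status} {raw.ReasonPhrase}");
        }
        return response;
    }
}

With such a helper, SaveBlob could be written as (await client.GetBlobClient(name).UploadAsync(new BinaryData(data), overwrite: true)).EnsureSuccess($"save blob {name}"), keeping the error path in one place.
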

View File

@ -22,21 +22,21 @@ public class DiskOperations : IDiskOperations {
public async Task<bool> DeleteDisk(string resourceGroup, string name) { public async Task<bool> DeleteDisk(string resourceGroup, string name) {
try { try {
_logTracer.Info($"deleting disks {resourceGroup} : {name}"); _logTracer.Info($"deleting disks {resourceGroup:Tag:ResourceGroup} - {name:Tag:Name}");
var disk = await _creds.GetResourceGroupResource().GetDiskAsync(name); var disk = await _creds.GetResourceGroupResource().GetDiskAsync(name);
if (disk != null) { if (disk != null) {
_ = await disk.Value.DeleteAsync(WaitUntil.Started); _ = await disk.Value.DeleteAsync(WaitUntil.Started);
return true; return true;
} }
} catch (Exception e) { } catch (Exception e) {
_logTracer.Error($"unable to delete disk: {name} {e.Message}"); _logTracer.Error($"unable to delete disk: {name:Tag:Name} {e.Message:Tag:Error}");
_logTracer.Exception(e); _logTracer.Exception(e);
} }
return false; return false;
} }
public DiskCollection ListDisks(string resourceGroup) { public DiskCollection ListDisks(string resourceGroup) {
_logTracer.Info($"listing disks {resourceGroup}"); _logTracer.Info($"listing disks {resourceGroup:Tag:ResourceGroup}");
return _creds.GetResourceGroupResource().GetDisks(); return _creds.GetResourceGroupResource().GetDisks();
} }
} }

View File

@ -67,9 +67,8 @@ public class EndpointAuthorization : IEndpointAuthorization {
} }
public async Async.Task<HttpResponseData> Reject(HttpRequestData req, UserInfo token) { public async Async.Task<HttpResponseData> Reject(HttpRequestData req, UserInfo token) {
_log.Error( var body = await req.ReadAsStringAsync();
$"reject token. url:{req.Url} token:{token} body:{await req.ReadAsStringAsync()}" _log.Error($"reject token. url:{req.Url:Tag:Url} token:{token:Tag:Token} body:{body:Tag:Body}");
);
return await _context.RequestHandling.NotOk( return await _context.RequestHandling.NotOk(
req, req,
@ -154,7 +153,7 @@ public class EndpointAuthorization : IEndpointAuthorization {
var membershipChecker = CreateGroupMembershipChecker(instanceConfig); var membershipChecker = CreateGroupMembershipChecker(instanceConfig);
var allowed = await membershipChecker.IsMember(rule.AllowedGroupsIds, memberId); var allowed = await membershipChecker.IsMember(rule.AllowedGroupsIds, memberId);
if (!allowed) { if (!allowed) {
_log.Error($"unauthorized access: {memberId} is not authorized to access {path}"); _log.Error($"unauthorized access: {memberId:Tag:MemberId} is not authorized to access {path:Tag:Path}");
return new Error( return new Error(
Code: ErrorCode.UNAUTHORIZED, Code: ErrorCode.UNAUTHORIZED,
Errors: new string[] { "not approved to use this endpoint" }); Errors: new string[] { "not approved to use this endpoint" });

View File

@ -64,7 +64,7 @@ namespace Microsoft.OneFuzz.Service {
public void LogEvent(BaseEvent anEvent) { public void LogEvent(BaseEvent anEvent) {
var serializedEvent = JsonSerializer.Serialize(anEvent, anEvent.GetType(), _options); var serializedEvent = JsonSerializer.Serialize(anEvent, anEvent.GetType(), _options);
_log.WithTag("Event Type", anEvent.GetEventType().ToString()).Info($"sending event: {anEvent.GetEventType()} - {serializedEvent}"); _log.Info($"sending event: {anEvent.GetEventType():Tag:EventType} - {serializedEvent}");
} }
} }

View File

@ -43,7 +43,7 @@ public class IpOperations : IIpOperations {
} }
public async Async.Task<NetworkInterfaceResource?> GetPublicNic(string resourceGroup, string name) { public async Async.Task<NetworkInterfaceResource?> GetPublicNic(string resourceGroup, string name) {
_logTracer.Info($"getting nic: {resourceGroup} {name}"); _logTracer.Info($"getting nic: {resourceGroup:Tag:ResourceGroup} - {name:Tag:Name}");
try { try {
return await _context.Creds.GetResourceGroupResource().GetNetworkInterfaceAsync(name); return await _context.Creds.GetResourceGroupResource().GetNetworkInterfaceAsync(name);
} catch (RequestFailedException) { } catch (RequestFailedException) {
@ -52,7 +52,7 @@ public class IpOperations : IIpOperations {
} }
public async Async.Task<PublicIPAddressResource?> GetIp(string resourceGroup, string name) { public async Async.Task<PublicIPAddressResource?> GetIp(string resourceGroup, string name) {
_logTracer.Info($"getting ip {resourceGroup}:{name}"); _logTracer.Info($"getting ip {resourceGroup:Tag:ResourceGroup} - {name:Tag:Name}");
try { try {
return await _context.Creds.GetResourceGroupResource().GetPublicIPAddressAsync(name); return await _context.Creds.GetResourceGroupResource().GetPublicIPAddressAsync(name);
} catch (RequestFailedException) { } catch (RequestFailedException) {
@ -61,35 +61,35 @@ public class IpOperations : IIpOperations {
} }
public async System.Threading.Tasks.Task DeleteNic(string resourceGroup, string name) { public async System.Threading.Tasks.Task DeleteNic(string resourceGroup, string name) {
_logTracer.Info($"deleting nic {resourceGroup}:{name}"); _logTracer.Info($"deleting nic {resourceGroup:Tag:ResourceGroup} - {name:Tag:Name}");
var networkInterface = await _context.Creds.GetResourceGroupResource().GetNetworkInterfaceAsync(name); var networkInterface = await _context.Creds.GetResourceGroupResource().GetNetworkInterfaceAsync(name);
try { try {
var r = await networkInterface.Value.DeleteAsync(WaitUntil.Started); var r = await networkInterface.Value.DeleteAsync(WaitUntil.Started);
if (r.GetRawResponse().IsError) { if (r.GetRawResponse().IsError) {
_logTracer.Error($"failed to start deleting nic {name} due to {r.GetRawResponse().ReasonPhrase}"); _logTracer.Error($"failed to start deleting nic {name:Tag:Name} due to {r.GetRawResponse().ReasonPhrase:Tag:Error}");
} }
} catch (RequestFailedException ex) { } catch (RequestFailedException ex) {
_logTracer.Exception(ex); _logTracer.Exception(ex);
if (ex.ErrorCode != "NicReservedForAnotherVm") { if (ex.ErrorCode != "NicReservedForAnotherVm") {
throw; throw;
} }
_logTracer.Warning($"unable to delete nic {resourceGroup}:{name} {ex.Message}"); _logTracer.Warning($"unable to delete nic {resourceGroup:Tag:ResourceGroup} - {name:Tag:Name} {ex.Message:Tag:Error}");
} }
} }
public async System.Threading.Tasks.Task DeleteIp(string resourceGroup, string name) { public async System.Threading.Tasks.Task DeleteIp(string resourceGroup, string name) {
_logTracer.Info($"deleting ip {resourceGroup}:{name}"); _logTracer.Info($"deleting ip {resourceGroup:Tag:ResourceGroup} - {name:Tag:Name}");
var publicIpAddressAsync = await _context.Creds.GetResourceGroupResource().GetPublicIPAddressAsync(name); var publicIpAddressAsync = await _context.Creds.GetResourceGroupResource().GetPublicIPAddressAsync(name);
var r = await publicIpAddressAsync.Value.DeleteAsync(WaitUntil.Started); var r = await publicIpAddressAsync.Value.DeleteAsync(WaitUntil.Started);
if (r.GetRawResponse().IsError) { if (r.GetRawResponse().IsError) {
_logTracer.Error($"failed to start deleting ip address due to {r.GetRawResponse().ReasonPhrase}"); _logTracer.Error($"failed to start deleting ip address {resourceGroup:Tag:ResourceGroup} - {name:Tag:Name} due to {r.GetRawResponse().ReasonPhrase:Tag:Error}");
} }
} }
public async Task<string?> GetScalesetInstanceIp(Guid scalesetId, Guid machineId) { public async Task<string?> GetScalesetInstanceIp(Guid scalesetId, Guid machineId) {
var instance = await _context.VmssOperations.GetInstanceId(scalesetId, machineId); var instance = await _context.VmssOperations.GetInstanceId(scalesetId, machineId);
if (!instance.IsOk) { if (!instance.IsOk) {
_logTracer.Verbose($"failed to get vmss {scalesetId} for instance id {machineId} due to {instance.ErrorV}"); _logTracer.Verbose($"failed to get vmss {scalesetId:Tag:ScalesetId} for instance id {machineId:Tag:MachineId} due to {instance.ErrorV:Tag:Error}");
return null; return null;
} }
@ -103,7 +103,7 @@ public class IpOperations : IIpOperations {
public async Task<string?> GetPublicIp(ResourceIdentifier resourceId) { public async Task<string?> GetPublicIp(ResourceIdentifier resourceId) {
// TODO: Parts of this function seem redundant, but I'm mirroring // TODO: Parts of this function seem redundant, but I'm mirroring
// the python code exactly. We should revisit this. // the python code exactly. We should revisit this.
_logTracer.Info($"getting ip for {resourceId}"); _logTracer.Info($"getting ip for {resourceId:Tag:ResourceId}");
try { try {
var resource = await (_context.Creds.GetData(_context.Creds.ParseResourceId(resourceId))); var resource = await (_context.Creds.GetData(_context.Creds.ParseResourceId(resourceId)));
var networkInterfaces = await _context.Creds.GetResourceGroupResource().GetNetworkInterfaceAsync( var networkInterfaces = await _context.Creds.GetResourceGroupResource().GetNetworkInterfaceAsync(
@ -129,7 +129,7 @@ public class IpOperations : IIpOperations {
} }
public async Task<OneFuzzResultVoid> CreatePublicNic(string resourceGroup, string name, Region region, Nsg? nsg) { public async Task<OneFuzzResultVoid> CreatePublicNic(string resourceGroup, string name, Region region, Nsg? nsg) {
_logTracer.Info($"creating nic for {resourceGroup}:{name} in {region}"); _logTracer.Info($"creating nic for {resourceGroup:Tag:ResourceGroup} - {name:Tag:Name} in {region:Tag:Region}");
var network = await Network.Init(region, _context); var network = await Network.Init(region, _context);
var subnetId = await network.GetId(); var subnetId = await network.GetId();
@ -137,7 +137,7 @@ public class IpOperations : IIpOperations {
if (subnetId is null) { if (subnetId is null) {
var r = await network.Create(); var r = await network.Create();
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.Error($"failed to create network in region {region} due to {r.ErrorV}"); _logTracer.Error($"failed to create network in region {region:Tag:Region} due to {r.ErrorV:Tag:Error}");
} }
return r; return r;
} }
@ -176,7 +176,7 @@ public class IpOperations : IIpOperations {
var onefuzzOwner = _context.ServiceConfiguration.OneFuzzOwner; var onefuzzOwner = _context.ServiceConfiguration.OneFuzzOwner;
if (!string.IsNullOrEmpty(onefuzzOwner)) { if (!string.IsNullOrEmpty(onefuzzOwner)) {
if (!networkInterface.Tags.TryAdd("OWNER", onefuzzOwner)) { if (!networkInterface.Tags.TryAdd("OWNER", onefuzzOwner)) {
_logTracer.Warning($"Failed to add tag 'OWNER':{onefuzzOwner} to nic {resourceGroup}:{name}"); _logTracer.Warning($"Failed to add tag 'OWNER':{onefuzzOwner:Tag:Owner} to nic {resourceGroup:Tag:ResourceGroup}:{name:Tag:Name}");
} }
} }
@ -189,7 +189,7 @@ public class IpOperations : IIpOperations {
); );
if (r.GetRawResponse().IsError) { if (r.GetRawResponse().IsError) {
_logTracer.Error($"failed to createOrUpdate network interface {name} due to {r.GetRawResponse().ReasonPhrase}"); _logTracer.Error($"failed to createOrUpdate network interface {name:Tag:Name} due to {r.GetRawResponse().ReasonPhrase:Tag:Error}");
} }
} catch (RequestFailedException ex) { } catch (RequestFailedException ex) {
if (!ex.ToString().Contains("RetryableError")) { if (!ex.ToString().Contains("RetryableError")) {
@ -212,7 +212,7 @@ public class IpOperations : IIpOperations {
var onefuzzOwner = _context.ServiceConfiguration.OneFuzzOwner; var onefuzzOwner = _context.ServiceConfiguration.OneFuzzOwner;
if (!string.IsNullOrEmpty(onefuzzOwner)) { if (!string.IsNullOrEmpty(onefuzzOwner)) {
if (!ipParams.Tags.TryAdd("OWNER", onefuzzOwner)) { if (!ipParams.Tags.TryAdd("OWNER", onefuzzOwner)) {
_logTracer.Warning($"Failed to add tag 'OWNER':{onefuzzOwner} to ip {resourceGroup}:{name}"); _logTracer.Warning($"Failed to add tag 'OWNER':{onefuzzOwner:Tag:Owner} to ip {resourceGroup:Tag:ResourceGroup}:{name:Tag:Name}");
} }
} }
@ -220,7 +220,7 @@ public class IpOperations : IIpOperations {
WaitUntil.Started, name, ipParams WaitUntil.Started, name, ipParams
); );
if (r.GetRawResponse().IsError) { if (r.GetRawResponse().IsError) {
_logTracer.Error($"Failed to create or update Public Ip Address {name} due to {r.GetRawResponse().ReasonPhrase}"); _logTracer.Error($"Failed to create or update Public Ip Address {name:Tag:Name} due to {r.GetRawResponse().ReasonPhrase:Tag:Error}");
} }
return; return;
@ -270,7 +270,8 @@ public class IpOperations : IIpOperations {
if (nics != null) if (nics != null)
return nics.value.SelectMany(x => x.properties.ipConfigurations.Select(i => i.properties.privateIPAddress)).WhereNotNull().ToList(); return nics.value.SelectMany(x => x.properties.ipConfigurations.Select(i => i.properties.privateIPAddress)).WhereNotNull().ToList();
} else { } else {
_logTracer.Error($"failed to get ListInstancePrivateIps due to {await response.Content.ReadAsStringAsync()}"); var body = await response.Content.ReadAsStringAsync();
_logTracer.Error($"failed to get ListInstancePrivateIps due to {body}");
} }
return new List<string>(); return new List<string>();
} }

View File

@ -32,7 +32,7 @@ public class JobOperations : StatefulOrm<Job, JobState, JobOperations>, IJobOper
if (job.EndTime == null) { if (job.EndTime == null) {
var r = await Replace(job with { EndTime = DateTimeOffset.UtcNow + TimeSpan.FromHours(job.Config.Duration) }); var r = await Replace(job with { EndTime = DateTimeOffset.UtcNow + TimeSpan.FromHours(job.Config.Duration) });
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace job {job.JobId} when calling OnStart"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace job {job.JobId:Tag:JobId} when calling OnStart");
} }
} }
} }
@ -54,7 +54,7 @@ public class JobOperations : StatefulOrm<Job, JobState, JobOperations>, IJobOper
var tasks = _context.TaskOperations.GetByJobId(job.JobId); var tasks = _context.TaskOperations.GetByJobId(job.JobId);
if (!await tasks.AnyAsync()) { if (!await tasks.AnyAsync()) {
_logTracer.Warning($"StopIfAllDone could not find any tasks for job with id {job.JobId}"); _logTracer.Warning($"StopIfAllDone could not find any tasks for job with id {job.JobId:Tag:JobId}");
} }
var anyNotStoppedTasks = await tasks.AnyAsync(task => task.State != TaskState.Stopped); var anyNotStoppedTasks = await tasks.AnyAsync(task => task.State != TaskState.Stopped);
@ -63,7 +63,7 @@ public class JobOperations : StatefulOrm<Job, JobState, JobOperations>, IJobOper
return; return;
} }
_logTracer.Info($"stopping job as all tasks are stopped: {job.JobId}"); _logTracer.Info($"stopping job as all tasks are stopped: {job.JobId:Tag:JobId}");
_ = await Stopping(job); _ = await Stopping(job);
} }
@ -82,7 +82,7 @@ public class JobOperations : StatefulOrm<Job, JobState, JobOperations>, IJobOper
await foreach (var task in _context.TaskOperations.QueryAsync(Query.PartitionKey(job.JobId.ToString()))) { await foreach (var task in _context.TaskOperations.QueryAsync(Query.PartitionKey(job.JobId.ToString()))) {
await _context.TaskOperations.MarkFailed(task, new Error(ErrorCode.TASK_FAILED, new[] { "job never started" })); await _context.TaskOperations.MarkFailed(task, new Error(ErrorCode.TASK_FAILED, new[] { "job never started" }));
} }
_logTracer.Info($"stopping job that never started: {job.JobId}"); _logTracer.Info($"stopping job that never started: {job.JobId:Tag:JobId}");
// updated result ignored: not used after this loop // updated result ignored: not used after this loop
_ = await _context.JobOperations.Stopping(job); _ = await _context.JobOperations.Stopping(job);
@ -90,13 +90,13 @@ public class JobOperations : StatefulOrm<Job, JobState, JobOperations>, IJobOper
} }
public async Async.Task<Job> Init(Job job) { public async Async.Task<Job> Init(Job job) {
_logTracer.Info($"init job: {job.JobId}"); _logTracer.Info($"init job: {job.JobId:Tag:JobId}");
var enabled = job with { State = JobState.Enabled }; var enabled = job with { State = JobState.Enabled };
var result = await Replace(enabled); var result = await Replace(enabled);
if (result.IsOk) { if (result.IsOk) {
return enabled; return enabled;
} else { } else {
_logTracer.WithHttpStatus(result.ErrorV).Error($"Failed to save job when init {job.JobId} : {result.ErrorV}"); _logTracer.WithHttpStatus(result.ErrorV).Error($"Failed to save job when init {job.JobId:Tag:JobId} : {result.ErrorV:Tag:Error}");
throw new Exception($"Failed to save job when init {job.JobId} : {result.ErrorV}"); throw new Exception($"Failed to save job when init {job.JobId} : {result.ErrorV}");
} }
} }
@ -124,7 +124,7 @@ public class JobOperations : StatefulOrm<Job, JobState, JobOperations>, IJobOper
if (result.IsOk) { if (result.IsOk) {
return job; return job;
} else { } else {
_logTracer.WithHttpStatus(result.ErrorV).Error($"Failed to save job when stopping {job.JobId} : {result.ErrorV}"); _logTracer.WithHttpStatus(result.ErrorV).Error($"Failed to save job when stopping {job.JobId:Tag:JobId} : {result.ErrorV:Tag:Error}");
throw new Exception($"Failed to save job when stopping {job.JobId} : {result.ErrorV}"); throw new Exception($"Failed to save job when stopping {job.JobId} : {result.ErrorV}");
} }
} }

View File

@ -29,12 +29,12 @@ public class NodeMessageOperations : Orm<NodeMessage>, INodeMessageOperations {
=> QueryAsync(Query.PartitionKey(machineId.ToString())); => QueryAsync(Query.PartitionKey(machineId.ToString()));
public async Async.Task ClearMessages(Guid machineId) { public async Async.Task ClearMessages(Guid machineId) {
_logTracer.Info($"clearing messages for node {machineId}"); _logTracer.Info($"clearing messages for node {machineId:Tag:MachineId}");
await foreach (var message in GetMessage(machineId)) { await foreach (var message in GetMessage(machineId)) {
var r = await Delete(message); var r = await Delete(message);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to delete message for node {machineId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to delete message for node {machineId:Tag:MachineId}");
} }
} }
} }
@ -43,7 +43,7 @@ public class NodeMessageOperations : Orm<NodeMessage>, INodeMessageOperations {
messageId ??= EntityBase.NewSortedKey; messageId ??= EntityBase.NewSortedKey;
var r = await Insert(new NodeMessage(machineId, messageId, message)); var r = await Insert(new NodeMessage(machineId, messageId, message));
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to insert message with id: {messageId} for machine id: {machineId} message: {message}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to insert message with id: {messageId:Tag:MessageId} for machine id: {machineId:Tag:MachineId} message: {message:Tag:Message}");
} }
} }
} }

View File

@ -301,7 +301,7 @@ public class NodeOperations : StatefulOrm<Node, NodeState, NodeOperations>, INod
var reimageRequested = node.ReimageRequested; var reimageRequested = node.ReimageRequested;
if (!node.ReimageRequested && !node.DeleteRequested) { if (!node.ReimageRequested && !node.DeleteRequested) {
_logTracer.Info($"setting reimage_requested: {node.MachineId}"); _logTracer.Info($"setting reimage_requested: {node.MachineId:Tag:MachineId}");
reimageRequested = true; reimageRequested = true;
} }
@ -311,7 +311,7 @@ public class NodeOperations : StatefulOrm<Node, NodeState, NodeOperations>, INod
var r = await Replace(updatedNode); var r = await Replace(updatedNode);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error("Failed to save Node record"); _logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to save Node record for node {node.MachineId:Tag:MachineId}");
} }
return updatedNode; return updatedNode;
@ -343,7 +343,7 @@ public class NodeOperations : StatefulOrm<Node, NodeState, NodeOperations>, INod
r = await Update(node); r = await Update(node);
} }
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to save NodeRecord, isNew: {isNew}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to save NodeRecord for node {node.MachineId:Tag:MachineId} isNew: {isNew:Tag:IsNew}");
} else { } else {
await _context.Events.SendEvent( await _context.Events.SendEvent(
new EventNodeCreated( new EventNodeCreated(
@ -369,7 +369,7 @@ public class NodeOperations : StatefulOrm<Node, NodeState, NodeOperations>, INod
/// <param name="node"></param> /// <param name="node"></param>
/// <returns></returns> /// <returns></returns>
public async Async.Task<Node> SetHalt(Node node) { public async Async.Task<Node> SetHalt(Node node) {
_logTracer.Info($"setting halt: {node.MachineId}"); _logTracer.Info($"setting halt: {node.MachineId:Tag:MachineId}");
node = node with { DeleteRequested = true }; node = node with { DeleteRequested = true };
node = await Stop(node, true); node = await Stop(node, true);
await SendStopIfFree(node); await SendStopIfFree(node);
@ -378,11 +378,11 @@ public class NodeOperations : StatefulOrm<Node, NodeState, NodeOperations>, INod
public async Async.Task<Node> SetShutdown(Node node) { public async Async.Task<Node> SetShutdown(Node node) {
//don't give out more work to the node, but let it finish existing work //don't give out more work to the node, but let it finish existing work
_logTracer.Info($"setting delete_requested: {node.MachineId}"); _logTracer.Info($"setting delete_requested: {node.MachineId:Tag:MachineId}");
node = node with { DeleteRequested = true }; node = node with { DeleteRequested = true };
var r = await Replace(node); var r = await Replace(node);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to update node with delete requested. machine id: {node.MachineId}, pool name: {node.PoolName}, pool id: {node.PoolId}, scaleset id: {node.ScalesetId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to update node with delete requested. {node.MachineId:Tag:MachineId} {node.PoolName:Tag:PoolName} {node.PoolId:Tag:PoolId} {node.ScalesetId:Tag:ScalesetId}");
} }
await SendStopIfFree(node); await SendStopIfFree(node);
@ -400,7 +400,7 @@ public class NodeOperations : StatefulOrm<Node, NodeState, NodeOperations>, INod
public async Async.Task SendMessage(Node node, NodeCommand message) { public async Async.Task SendMessage(Node node, NodeCommand message) {
var r = await _context.NodeMessageOperations.Replace(new NodeMessage(node.MachineId, message)); var r = await _context.NodeMessageOperations.Replace(new NodeMessage(node.MachineId, message));
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace NodeMessge record for machine_id: {node.MachineId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace NodeMessge record for {node.MachineId:Tag:MachineId}");
} }
} }
@ -454,7 +454,7 @@ public class NodeOperations : StatefulOrm<Node, NodeState, NodeOperations>, INod
var r = await Update(node); var r = await Update(node);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.Error($"Failed to update node for machine: {node.MachineId} to state {state} due to {r.ErrorV}"); _logTracer.Error($"Failed to update node for: {node.MachineId:Tag:MachineId} {state:Tag:State} due to {r.ErrorV:Tag:Error}");
// TODO: this should error out // TODO: this should error out
} }
@ -538,7 +538,7 @@ public class NodeOperations : StatefulOrm<Node, NodeState, NodeOperations>, INod
if (!node.DebugKeepNode) { if (!node.DebugKeepNode) {
var r = await _context.NodeTasksOperations.Delete(entry); var r = await _context.NodeTasksOperations.Delete(entry);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to delete task operation for task {entry.TaskId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to delete task operation for {entry.TaskId:Tag:TaskId}");
} }
} }
} }
@ -550,7 +550,7 @@ public class NodeOperations : StatefulOrm<Node, NodeState, NodeOperations>, INod
await _context.NodeMessageOperations.ClearMessages(node.MachineId); await _context.NodeMessageOperations.ClearMessages(node.MachineId);
var r = await base.Delete(node); var r = await base.Delete(node);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to delete node {node.MachineId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to delete node {node.MachineId:Tag:MachineId}");
} }
await _context.Events.SendEvent(new EventNodeDeleted(node.MachineId, node.ScalesetId, node.PoolName, node.State)); await _context.Events.SendEvent(new EventNodeDeleted(node.MachineId, node.ScalesetId, node.PoolName, node.State));
@ -567,7 +567,7 @@ public class NodeOperations : StatefulOrm<Node, NodeState, NodeOperations>, INod
await _context.NodeMessageOperations.SendMessage(node.MachineId, new NodeCommand(StopTask: new StopTaskNodeCommand(task_id))); await _context.NodeMessageOperations.SendMessage(node.MachineId, new NodeCommand(StopTask: new StopTaskNodeCommand(task_id)));
if (!await StopIfComplete(node)) { if (!await StopIfComplete(node)) {
_logTracer.Info($"nodes: stopped task on node, but not reimaging due to other tasks: task_id:{task_id} machine_id:{node.MachineId}"); _logTracer.Info($"nodes: stopped task on node, but not reimaging due to other tasks: {task_id:Tag:TaskId} {node.MachineId:Tag:MachineId}");
} }
} }
@ -598,7 +598,7 @@ public class NodeOperations : StatefulOrm<Node, NodeState, NodeOperations>, INod
return false; return false;
} }
} }
_logTracer.Info($"node: stopping busy node with all tasks complete: {node.MachineId}"); _logTracer.Info($"node: stopping busy node with all tasks complete: {node.MachineId:Tag:MachineId}");
_ = await Stop(node, done: done); _ = await Stop(node, done: done);
return true; return true;

View File

@ -53,11 +53,11 @@ public class NodeTasksOperations : StatefulOrm<NodeTasks, NodeTaskState, NodeTas
} }
public async Async.Task ClearByMachineId(Guid machineId) { public async Async.Task ClearByMachineId(Guid machineId) {
_logTracer.Info($"clearing tasks for node {machineId}"); _logTracer.Info($"clearing tasks for node {machineId:Tag:MachineId}");
await foreach (var entry in GetByMachineId(machineId)) { await foreach (var entry in GetByMachineId(machineId)) {
var res = await Delete(entry); var res = await Delete(entry);
if (!res.IsOk) { if (!res.IsOk) {
_logTracer.WithHttpStatus(res.ErrorV).Error($"failed to delete node task entry for machine_id: {entry.MachineId}"); _logTracer.WithHttpStatus(res.ErrorV).Error($"failed to delete node task entry for {entry.MachineId:Tag:MachineId}");
} }
} }
} }

View File

@ -96,10 +96,10 @@ public class NotificationOperations : Orm<Notification>, INotificationOperations
if (replaceExisting) { if (replaceExisting) {
var existing = this.SearchByRowKeys(new[] { container.String }); var existing = this.SearchByRowKeys(new[] { container.String });
await foreach (var existingEntry in existing) { await foreach (var existingEntry in existing) {
_logTracer.Info($"deleting existing notification: {existingEntry.NotificationId} - {container}"); _logTracer.Info($"deleting existing notification: {existingEntry.NotificationId:Tag:NotificationId} - {container:Tag:Container}");
var rr = await this.Delete(existingEntry); var rr = await this.Delete(existingEntry);
if (!rr.IsOk) { if (!rr.IsOk) {
_logTracer.WithHttpStatus(rr.ErrorV).Error($"failed to delete existing notification {existingEntry.NotificationId} - {container}"); _logTracer.WithHttpStatus(rr.ErrorV).Error($"failed to delete existing notification {existingEntry.NotificationId:Tag:NotificationId} - {container:Tag:Container}");
} }
} }
} }
@ -107,9 +107,9 @@ public class NotificationOperations : Orm<Notification>, INotificationOperations
var entry = new Notification(Guid.NewGuid(), container, configWithHiddenSecret); var entry = new Notification(Guid.NewGuid(), container, configWithHiddenSecret);
var r = await this.Insert(entry); var r = await this.Insert(entry);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to insert notification with id {entry.NotificationId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to insert notification {entry.NotificationId:Tag:NotificationId}");
} }
_logTracer.Info($"created notification. notification_id:{entry.NotificationId} container:{entry.Container}"); _logTracer.Info($"created notification {entry.NotificationId:Tag:NotificationId} - {entry.Container:Tag:Container}");
return OneFuzzResult<Notification>.Ok(entry); return OneFuzzResult<Notification>.Ok(entry);
} }

View File

@ -49,7 +49,7 @@ namespace Microsoft.OneFuzz.Service {
} }
if (subnet.Data.NetworkSecurityGroup != null && subnet.Data.NetworkSecurityGroup.Id == nsg.Id) { if (subnet.Data.NetworkSecurityGroup != null && subnet.Data.NetworkSecurityGroup.Id == nsg.Id) {
_logTracer.Info($"Subnet {subnet.Data.Name} and NSG {name} already associated, not updating"); _logTracer.Info($"Subnet {subnet.Data.Name:Tag:SubnetName} - {name:Tag:NsgName} already associated, not updating");
return OneFuzzResult<bool>.Ok(true); return OneFuzzResult<bool>.Ok(true);
} }
@ -81,7 +81,7 @@ namespace Microsoft.OneFuzz.Service {
); );
} }
_logTracer.Info($"dissociating nic {nic.Data.Name} with nsg: {_context.Creds.GetBaseResourceGroup()} {nsg.Name}"); _logTracer.Info($"dissociating {nic.Data.Name:Tag:NicName} with {_context.Creds.GetBaseResourceGroup():Tag:ResourceGroup} - {nsg.Name:Tag:NsgName}");
nic.Data.NetworkSecurityGroup = null; nic.Data.NetworkSecurityGroup = null;
try { try {
_ = await _context.Creds.GetResourceGroupResource() _ = await _context.Creds.GetResourceGroupResource()
@ -115,10 +115,10 @@ namespace Microsoft.OneFuzz.Service {
return response?.Value; return response?.Value;
} catch (RequestFailedException ex) { } catch (RequestFailedException ex) {
if (ex.ErrorCode == "ResourceNotFound") { if (ex.ErrorCode == "ResourceNotFound") {
_logTracer.Verbose($"could not find nsg with name {name}"); _logTracer.Verbose($"could not find {name:Tag:NsgName}");
return null; return null;
} else { } else {
_logTracer.Exception(ex, $"failed to get nsg {name}"); _logTracer.Exception(ex, $"failed to get {name:Tag:NsgName}");
throw; throw;
} }
} }
@ -137,19 +137,19 @@ namespace Microsoft.OneFuzz.Service {
/// Returns False if failed to start deletion. /// Returns False if failed to start deletion.
/// </summary> /// </summary>
public async Async.Task<bool> StartDeleteNsg(string name) { public async Async.Task<bool> StartDeleteNsg(string name) {
_logTracer.Info($"deleting nsg: {name}"); _logTracer.Info($"deleting nsg: {name:Tag:NsgName}");
try { try {
var nsg = await _context.Creds.GetResourceGroupResource().GetNetworkSecurityGroupAsync(name); var nsg = await _context.Creds.GetResourceGroupResource().GetNetworkSecurityGroupAsync(name);
var r = await nsg.Value.DeleteAsync(WaitUntil.Started); var r = await nsg.Value.DeleteAsync(WaitUntil.Started);
if (r.GetRawResponse().IsError) { if (r.GetRawResponse().IsError) {
_logTracer.Error($"failed to start nsg deletion for nsg: {name} due to {r.GetRawResponse().ReasonPhrase}"); _logTracer.Error($"failed to start nsg deletion for: {name:Tag:NsgName} due to {r.GetRawResponse().ReasonPhrase:Tag:Error}");
} }
return true; return true;
} catch (RequestFailedException ex) { } catch (RequestFailedException ex) {
if (ex.ErrorCode == "ResourceNotFound") { if (ex.ErrorCode == "ResourceNotFound") {
return true; return true;
} else { } else {
_logTracer.Exception(ex, $"failed to delete nsg {name}"); _logTracer.Exception(ex, $"failed to delete {name:Tag:NsgName}");
throw; throw;
} }
} }
@ -215,7 +215,7 @@ namespace Microsoft.OneFuzz.Service {
} }
_logTracer.Info( _logTracer.Info(
$"setting allowed incoming connection sources for nsg: {_context.Creds.GetBaseResourceGroup()} {name}" $"setting allowed incoming connection sources for nsg: {_context.Creds.GetBaseResourceGroup():Tag:ResourceGroup} {name:Tag:NsgName}"
); );
var allSources = new List<string>(); var allSources = new List<string>();
@ -282,12 +282,12 @@ namespace Microsoft.OneFuzz.Service {
} }
if (nic.Data.NetworkSecurityGroup != null && nic.Data.NetworkSecurityGroup.Id == nsg.Id) { if (nic.Data.NetworkSecurityGroup != null && nic.Data.NetworkSecurityGroup.Id == nsg.Id) {
_logTracer.Info($"NIC {nic.Data.Name} and NSG {nsg.Data.Name} already associated, not updating"); _logTracer.Info($"{nic.Data.Name:Tag:NicName} - {nsg.Data.Name:Tag:NsgName} already associated, not updating");
return OneFuzzResultVoid.Ok; return OneFuzzResultVoid.Ok;
} }
nic.Data.NetworkSecurityGroup = nsg.Data; nic.Data.NetworkSecurityGroup = nsg.Data;
_logTracer.Info($"associating nic {nic.Data.Name} with nsg: {_context.Creds.GetBaseResourceGroup()} {nsg.Data.Name}"); _logTracer.Info($"associating {nic.Data.Name:Tag:NicName} - {_context.Creds.GetBaseResourceGroup():Tag:ResourceGroup} - {nsg.Data.Name:Tag:NsgName}");
try { try {
_ = await _context.Creds.GetResourceGroupResource().GetNetworkInterfaces().CreateOrUpdateAsync( _ = await _context.Creds.GetResourceGroupResource().GetNetworkInterfaces().CreateOrUpdateAsync(
@ -307,7 +307,7 @@ namespace Microsoft.OneFuzz.Service {
} }
public async Task<OneFuzzResultVoid> UpdateNsg(NetworkSecurityGroupData nsg) { public async Task<OneFuzzResultVoid> UpdateNsg(NetworkSecurityGroupData nsg) {
_logTracer.Info($"updating nsg {_context.Creds.GetBaseResourceGroup()}:{nsg.Location}:{nsg.Name}"); _logTracer.Info($"updating nsg {_context.Creds.GetBaseResourceGroup():Tag:ResourceGroup} - {nsg.Location:Tag:Location} - {nsg.Name:Tag:NsgName}");
try { try {
_ = await _context.Creds.GetResourceGroupResource().GetNetworkSecurityGroups().CreateOrUpdateAsync( _ = await _context.Creds.GetResourceGroupResource().GetNetworkSecurityGroups().CreateOrUpdateAsync(

View File

@ -45,7 +45,7 @@ public class PoolOperations : StatefulOrm<Pool, PoolState, PoolOperations>, IPoo
var r = await Insert(newPool); var r = await Insert(newPool);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to save new pool. Pool name: {newPool.Name}, PoolId: {newPool.PoolId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to save new pool. {newPool.Name:Tag:PoolName} - {newPool.PoolId:Tag:PoolId}");
} }
await _context.Events.SendEvent(new EventPoolCreated(PoolName: newPool.Name, Os: newPool.Os, Arch: newPool.Arch, Managed: newPool.Managed)); await _context.Events.SendEvent(new EventPoolCreated(PoolName: newPool.Name, Os: newPool.Os, Arch: newPool.Arch, Managed: newPool.Managed));
return newPool; return newPool;
@ -151,7 +151,7 @@ public class PoolOperations : StatefulOrm<Pool, PoolState, PoolOperations>, IPoo
pool = pool with { State = state }; pool = pool with { State = state };
var r = await Replace(pool); var r = await Replace(pool);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace pool {pool.PoolId} when setting state"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace pool {pool.PoolId:Tag:PoolId} when setting state");
} }
return pool; return pool;
} }
@ -167,7 +167,7 @@ public class PoolOperations : StatefulOrm<Pool, PoolState, PoolOperations>, IPoo
new public async Async.Task Delete(Pool pool) { new public async Async.Task Delete(Pool pool) {
var r = await base.Delete(pool); var r = await base.Delete(pool);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to delete pool: {pool.Name}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to delete pool: {pool.Name:Tag:PoolName}");
} }
var poolQueue = GetPoolQueue(pool.PoolId); var poolQueue = GetPoolQueue(pool.PoolId);
await _context.Queue.DeleteQueue(poolQueue, StorageType.Corpus); await _context.Queue.DeleteQueue(poolQueue, StorageType.Corpus);
@ -183,7 +183,7 @@ public class PoolOperations : StatefulOrm<Pool, PoolState, PoolOperations>, IPoo
var nodes = _context.NodeOperations.SearchByPoolName(pool.Name); var nodes = _context.NodeOperations.SearchByPoolName(pool.Name);
if (scalesets is null && nodes is null) { if (scalesets is null && nodes is null) {
_logTracer.Info($"pool stopped, deleting {pool.Name}"); _logTracer.Info($"pool stopped, deleting {pool.Name:Tag:PoolName}");
await Delete(pool); await Delete(pool);
return pool; return pool;
} }
@ -207,7 +207,7 @@ public class PoolOperations : StatefulOrm<Pool, PoolState, PoolOperations>, IPoo
//if it was changed by the caller - caller should perform save operation //if it was changed by the caller - caller should perform save operation
var r = await Update(pool); var r = await Update(pool);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.Error($"Failed to update pool record. pool name: {pool.Name}, pool id: {pool.PoolId}"); _logTracer.Error($"Failed to update pool record. {pool.Name:Tag:PoolName} - {pool.PoolId:Tag:PoolId}");
} }
return pool; return pool;
} }
@ -218,7 +218,7 @@ public class PoolOperations : StatefulOrm<Pool, PoolState, PoolOperations>, IPoo
var nodes = _context.NodeOperations.SearchByPoolName(pool.Name); var nodes = _context.NodeOperations.SearchByPoolName(pool.Name);
if (scalesets is null && nodes is null) { if (scalesets is null && nodes is null) {
_logTracer.Info($"pool stopped, deleting: {pool.Name}"); _logTracer.Info($"pool stopped, deleting: {pool.Name:Tag:PoolName}");
await Delete(pool); await Delete(pool);
} }

View File

@ -79,7 +79,7 @@ public class ProxyForwardOperations : Orm<ProxyForward>, IProxyForwardOperations
var result = await Replace(entry); var result = await Replace(entry);
if (!result.IsOk) { if (!result.IsOk) {
_logTracer.WithHttpStatus(result.ErrorV).Info($"port is already used {entry}"); _logTracer.WithHttpStatus(result.ErrorV).Info($"port is already used {entry:Tag:Entry}");
} }
return OneFuzzResult.Ok(entry); return OneFuzzResult.Ok(entry);

View File

@ -47,7 +47,7 @@ public class ProxyOperations : StatefulOrm<Proxy, VmState, ProxyOperations>, IPr
if (IsOutdated(proxy)) { if (IsOutdated(proxy)) {
var r1 = await Replace(proxy with { Outdated = true }); var r1 = await Replace(proxy with { Outdated = true });
if (!r1.IsOk) { if (!r1.IsOk) {
_logTracer.WithHttpStatus(r1.ErrorV).Error($"failed to replace record to mark proxy {proxy.ProxyId} as outdated"); _logTracer.WithHttpStatus(r1.ErrorV).Error($"failed to replace record to mark proxy {proxy.ProxyId:Tag:ProxyId} as outdated");
} }
continue; continue;
} }
@ -60,12 +60,12 @@ public class ProxyOperations : StatefulOrm<Proxy, VmState, ProxyOperations>, IPr
} }
} }
_logTracer.Info($"creating proxy: region:{region}"); _logTracer.Info($"creating proxy: region:{region:Tag:Region}");
var newProxy = new Proxy(region, Guid.NewGuid(), DateTimeOffset.UtcNow, VmState.Init, await Auth.BuildAuth(_logTracer), null, null, _context.ServiceConfiguration.OneFuzzVersion, null, false); var newProxy = new Proxy(region, Guid.NewGuid(), DateTimeOffset.UtcNow, VmState.Init, await Auth.BuildAuth(_logTracer), null, null, _context.ServiceConfiguration.OneFuzzVersion, null, false);
var r = await Replace(newProxy); var r = await Replace(newProxy);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to save new proxy {newProxy.ProxyId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to save new proxy {newProxy.ProxyId:Tag:ProxyId} in {region:Tag:Region}");
} }
await _context.Events.SendEvent(new EventProxyCreated(region, newProxy.ProxyId)); await _context.Events.SendEvent(new EventProxyCreated(region, newProxy.ProxyId));
@ -75,7 +75,7 @@ public class ProxyOperations : StatefulOrm<Proxy, VmState, ProxyOperations>, IPr
public async Task<bool> IsUsed(Proxy proxy) { public async Task<bool> IsUsed(Proxy proxy) {
var forwards = await GetForwards(proxy); var forwards = await GetForwards(proxy);
if (forwards.Count == 0) { if (forwards.Count == 0) {
_logTracer.Info($"no forwards {proxy.Region}"); _logTracer.Info($"no forwards {proxy.Region:Tag:Region}");
return false; return false;
} }
return true; return true;
@ -85,12 +85,12 @@ public class ProxyOperations : StatefulOrm<Proxy, VmState, ProxyOperations>, IPr
var tenMinutesAgo = DateTimeOffset.UtcNow - TimeSpan.FromMinutes(10); var tenMinutesAgo = DateTimeOffset.UtcNow - TimeSpan.FromMinutes(10);
if (proxy.Heartbeat is not null && proxy.Heartbeat.TimeStamp < tenMinutesAgo) { if (proxy.Heartbeat is not null && proxy.Heartbeat.TimeStamp < tenMinutesAgo) {
_logTracer.Info($"last heartbeat is more than an 10 minutes old: {proxy.Region} - last heartbeat:{proxy.Heartbeat} compared_to:{tenMinutesAgo}"); _logTracer.Info($"last heartbeat is more than an 10 minutes old: {proxy.Region:Tag:Region} {proxy.Heartbeat:Tag:LastHeartbeat} {tenMinutesAgo:Tag:ComparedToMinutesAgo}");
return false; return false;
} }
if (proxy.Heartbeat is not null && proxy.TimeStamp is not null && proxy.TimeStamp < tenMinutesAgo) { if (proxy.Heartbeat is not null && proxy.TimeStamp is not null && proxy.TimeStamp < tenMinutesAgo) {
_logTracer.Error($"no heartbeat in the last 10 minutes: {proxy.Region} timestamp: {proxy.TimeStamp} compared_to:{tenMinutesAgo}"); _logTracer.Error($"no heartbeat in the last 10 minutes: {proxy.Region:Tag:Region} {proxy.TimeStamp:Tag:Timestamp} {tenMinutesAgo:Tag:ComparedToMinutesAgo}");
return false; return false;
} }
@ -103,13 +103,13 @@ public class ProxyOperations : StatefulOrm<Proxy, VmState, ProxyOperations>, IPr
} }
if (proxy.Version != _context.ServiceConfiguration.OneFuzzVersion) { if (proxy.Version != _context.ServiceConfiguration.OneFuzzVersion) {
_logTracer.Info($"mismatch version: proxy:{proxy.Version} service:{_context.ServiceConfiguration.OneFuzzVersion} state:{proxy.State}"); _logTracer.Info($"mismatch version: proxy:{proxy.Version:Tag:ProxyVersion} {_context.ServiceConfiguration.OneFuzzVersion:Tag:ServiceVersion} {proxy.State:Tag:State}");
return true; return true;
} }
if (proxy.CreatedTimestamp is not null) { if (proxy.CreatedTimestamp is not null) {
if (proxy.CreatedTimestamp < (DateTimeOffset.UtcNow - PROXY_LIFESPAN)) { if (proxy.CreatedTimestamp < (DateTimeOffset.UtcNow - PROXY_LIFESPAN)) {
_logTracer.Info($"proxy older than 7 days:proxy-created:{proxy.CreatedTimestamp} state:{proxy.State}"); _logTracer.Info($"proxy older than 7 days: {proxy.CreatedTimestamp:Tag:ProxyCreated} - {proxy.State:Tag:State}");
return true; return true;
} }
} }
@ -143,7 +143,7 @@ public class ProxyOperations : StatefulOrm<Proxy, VmState, ProxyOperations>, IPr
var newProxy = proxy with { State = state }; var newProxy = proxy with { State = state };
var r = await Replace(newProxy); var r = await Replace(newProxy);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to replace proxy with id {newProxy.ProxyId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to replace proxy with {newProxy.ProxyId:Tag:ProxyId}");
} }
await _context.Events.SendEvent(new EventProxyStateUpdated(newProxy.Region, newProxy.ProxyId, newProxy.State)); await _context.Events.SendEvent(new EventProxyStateUpdated(newProxy.Region, newProxy.ProxyId, newProxy.State));
return newProxy; return newProxy;
@ -157,7 +157,7 @@ public class ProxyOperations : StatefulOrm<Proxy, VmState, ProxyOperations>, IPr
if (entry.EndTime < DateTimeOffset.UtcNow) { if (entry.EndTime < DateTimeOffset.UtcNow) {
var r = await _context.ProxyForwardOperations.Delete(entry); var r = await _context.ProxyForwardOperations.Delete(entry);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to delete proxy forward for proxy {proxy.ProxyId} in region {proxy.Region}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to delete proxy forward for {proxy.ProxyId:Tag:ProxyId} in {proxy.Region:Tag:Region}");
} }
} else { } else {
forwards.Add(new Forward(entry.Port, entry.DstPort, entry.DstIp)); forwards.Add(new Forward(entry.Port, entry.DstPort, entry.DstIp));
@ -199,7 +199,7 @@ public class ProxyOperations : StatefulOrm<Proxy, VmState, ProxyOperations>, IPr
} }
var r = await Replace(proxy); var r = await Replace(proxy);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to save proxy {proxy.ProxyId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to save proxy {proxy.ProxyId:Tag:ProxyId}");
} }
return proxy; return proxy;
} }
@ -215,7 +215,7 @@ public class ProxyOperations : StatefulOrm<Proxy, VmState, ProxyOperations>, IPr
return proxy; return proxy;
} }
_logTracer.Error($"vm failed: {proxy.Region} -{error}"); _logTracer.Error($"vm failed: {proxy.Region:Tag:Region} -{error:Tag:Error}");
await _context.Events.SendEvent(new EventProxyFailed(proxy.Region, proxy.ProxyId, error)); await _context.Events.SendEvent(new EventProxyFailed(proxy.Region, proxy.ProxyId, error));
return await SetState(proxy with { Error = error }, VmState.Stopping); return await SetState(proxy with { Error = error }, VmState.Stopping);
} }
@ -293,9 +293,9 @@ public class ProxyOperations : StatefulOrm<Proxy, VmState, ProxyOperations>, IPr
var config = await _context.ConfigOperations.Fetch(); var config = await _context.ConfigOperations.Fetch();
var vm = GetVm(proxy, config); var vm = GetVm(proxy, config);
if (!await _context.VmOperations.IsDeleted(vm)) { if (!await _context.VmOperations.IsDeleted(vm)) {
_logTracer.Info($"stopping proxy: {proxy.Region}"); _logTracer.Info($"stopping proxy: {proxy.Region:Tag:Region}");
if (await _context.VmOperations.Delete(vm)) { if (await _context.VmOperations.Delete(vm)) {
_logTracer.Info($"deleted proxy vm for region {proxy.Region}, name: {vm.Name}"); _logTracer.Info($"deleted proxy vm for region {proxy.Region:Tag:Region}, name: {vm.Name:Tag:Name}");
} }
return proxy; return proxy;
} }
@ -305,7 +305,7 @@ public class ProxyOperations : StatefulOrm<Proxy, VmState, ProxyOperations>, IPr
public async Task<Proxy> Stopped(Proxy proxy) { public async Task<Proxy> Stopped(Proxy proxy) {
var stoppedVm = await SetState(proxy, VmState.Stopped); var stoppedVm = await SetState(proxy, VmState.Stopped);
_logTracer.Info($"removing proxy: {stoppedVm.Region}"); _logTracer.Info($"removing proxy: {stoppedVm.Region:Tag:Region}");
await _context.Events.SendEvent(new EventProxyDeleted(stoppedVm.Region, stoppedVm.ProxyId)); await _context.Events.SendEvent(new EventProxyDeleted(stoppedVm.Region, stoppedVm.ProxyId));
await Delete(stoppedVm).IgnoreResult(); await Delete(stoppedVm).IgnoreResult();
return stoppedVm; return stoppedVm;

View File

@ -46,7 +46,7 @@ public class Queue : IQueue {
try { try {
_ = await queue.SendMessageAsync(message, visibilityTimeout: visibilityTimeout, timeToLive: timeToLive); _ = await queue.SendMessageAsync(message, visibilityTimeout: visibilityTimeout, timeToLive: timeToLive);
} catch (Exception ex) { } catch (Exception ex) {
_log.Exception(ex, $"Failed to send message {message}"); _log.Exception(ex, $"Failed to send {message:Tag:Message}");
throw; throw;
} }
} }
@ -63,13 +63,13 @@ public class Queue : IQueue {
var serialized = JsonSerializer.Serialize(obj, EntityConverter.GetJsonSerializerOptions()); var serialized = JsonSerializer.Serialize(obj, EntityConverter.GetJsonSerializerOptions());
var res = await queueClient.SendMessageAsync(serialized, visibilityTimeout: visibilityTimeout, timeToLive); var res = await queueClient.SendMessageAsync(serialized, visibilityTimeout: visibilityTimeout, timeToLive);
if (res.GetRawResponse().IsError) { if (res.GetRawResponse().IsError) {
_log.Error($"Failed to send message {serialized} in queue {name} due to {res.GetRawResponse().ReasonPhrase}"); _log.Error($"Failed to send {serialized:Tag:Message} in {name:Tag:QueueName} due to {res.GetRawResponse().ReasonPhrase:Tag:Error}");
return false; return false;
} else { } else {
return true; return true;
} }
} catch (Exception ex) { } catch (Exception ex) {
_log.Exception(ex, $"Failed to queue message in queue {name}"); _log.Exception(ex, $"Failed to queue message in {name:Tag:QueueName}");
return false; return false;
} }
} }
@ -88,7 +88,7 @@ public class Queue : IQueue {
var resp = await client.CreateIfNotExistsAsync(); var resp = await client.CreateIfNotExistsAsync();
if (resp is not null && resp.IsError) { if (resp is not null && resp.IsError) {
_log.Error($"failed to create queue {name} due to {resp.ReasonPhrase}"); _log.Error($"failed to create {name:Tag:QueueName} due to {resp.ReasonPhrase:Tag:Error}");
} }
} }
@ -96,7 +96,7 @@ public class Queue : IQueue {
var client = await GetQueueClient(name, storageType); var client = await GetQueueClient(name, storageType);
var resp = await client.DeleteIfExistsAsync(); var resp = await client.DeleteIfExistsAsync();
if (resp.GetRawResponse() is not null && resp.GetRawResponse().IsError) { if (resp.GetRawResponse() is not null && resp.GetRawResponse().IsError) {
_log.Error($"failed to delete queue {name} due to {resp.GetRawResponse().ReasonPhrase}"); _log.Error($"failed to delete {name:Tag:QueueName} due to {resp.GetRawResponse().ReasonPhrase:Tag:Error}");
} }
} }
@ -104,7 +104,7 @@ public class Queue : IQueue {
var client = await GetQueueClient(name, storageType); var client = await GetQueueClient(name, storageType);
var resp = await client.ClearMessagesAsync(); var resp = await client.ClearMessagesAsync();
if (resp is not null && resp.IsError) { if (resp is not null && resp.IsError) {
_log.Error($"failed to clear the queue {name} due to {resp.ReasonPhrase}"); _log.Error($"failed to clear the {name:Tag:QueueName} due to {resp.ReasonPhrase:Tag:Error}");
} }
} }
@ -115,14 +115,14 @@ public class Queue : IQueue {
foreach (var msg in msgs.Value) { foreach (var msg in msgs.Value) {
var resp = await client.DeleteMessageAsync(msg.MessageId, msg.PopReceipt); var resp = await client.DeleteMessageAsync(msg.MessageId, msg.PopReceipt);
if (resp.IsError) { if (resp.IsError) {
_log.Error($"failed to delete message from the queue {name} due to {resp.ReasonPhrase}"); _log.Error($"failed to delete message from the {name:Tag:QueueName} due to {resp.ReasonPhrase:Tag:Error}");
return false; return false;
} else { } else {
return true; return true;
} }
} }
} catch (RequestFailedException ex) when (ex.Status == 404 || ex.ErrorCode == "QueueNotFound") { } catch (RequestFailedException ex) when (ex.Status == 404 || ex.ErrorCode == "QueueNotFound") {
_log.Info($"tried to remove message from queue {name} but it doesn't exist"); _log.Info($"tried to remove message from queue {name:Tag:QueueName} but it doesn't exist");
return false; return false;
} }
@ -139,7 +139,7 @@ public class Queue : IQueue {
return result; return result;
} else if (msgs.GetRawResponse().IsError) { } else if (msgs.GetRawResponse().IsError) {
_log.Error($"failed to peek messages due to {msgs.GetRawResponse().ReasonPhrase}"); _log.Error($"failed to peek messages in {name:Tag:QueueName} due to {msgs.GetRawResponse().ReasonPhrase:Tag:Error}");
return result; return result;
} else { } else {
foreach (var msg in msgs.Value) { foreach (var msg in msgs.Value) {

View File

@ -30,7 +30,7 @@ public class Reports : IReports {
var filePath = string.Join("/", new[] { container.String, fileName }); var filePath = string.Join("/", new[] { container.String, fileName });
if (!fileName.EndsWith(".json", StringComparison.Ordinal)) { if (!fileName.EndsWith(".json", StringComparison.Ordinal)) {
if (expectReports) { if (expectReports) {
_log.Error($"get_report invalid extension: {filePath}"); _log.Error($"get_report invalid extension: {filePath:Tag:FilePath}");
} }
return null; return null;
} }
@ -39,7 +39,7 @@ public class Reports : IReports {
if (blob == null) { if (blob == null) {
if (expectReports) { if (expectReports) {
_log.Error($"get_report invalid blob: {filePath}"); _log.Error($"get_report invalid blob: {filePath:Tag:FilePath}");
} }
return null; return null;
} }
@ -52,7 +52,7 @@ public class Reports : IReports {
if (regressionReport == null || regressionReport.CrashTestResult == null) { if (regressionReport == null || regressionReport.CrashTestResult == null) {
var report = JsonSerializer.Deserialize<Report>(content, EntityConverter.GetJsonSerializerOptions()); var report = JsonSerializer.Deserialize<Report>(content, EntityConverter.GetJsonSerializerOptions());
if (expectReports && report == null) { if (expectReports && report == null) {
_log.Error($"unable to parse report ({filePath}) as a report or regression"); _log.Error($"unable to parse report ({filePath:Tag:FilePath}) as a report or regression");
return null; return null;
} }
return report; return report;

View File

@ -93,12 +93,12 @@ public class ReproOperations : StatefulOrm<Repro, VmState, ReproOperations>, IRe
_logTracer.Info($"vm stopping: {repro.VmId}"); _logTracer.Info($"vm stopping: {repro.VmId}");
var rr = await vmOperations.Delete(vm); var rr = await vmOperations.Delete(vm);
if (!rr) { if (!rr) {
_logTracer.Error($"failed to delete repro vm with id {repro.VmId}"); _logTracer.Error($"failed to delete repro {repro.VmId:Tag:VmId}");
} }
repro = repro with { State = VmState.Stopping }; repro = repro with { State = VmState.Stopping };
var r = await Replace(repro); var r = await Replace(repro);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace repro {repro.VmId} marked Stopping"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace repro {repro.VmId:Tag:VmId} marked Stopping");
} }
return repro; return repro;
} else { } else {
@ -107,12 +107,12 @@ public class ReproOperations : StatefulOrm<Repro, VmState, ReproOperations>, IRe
} }
public async Async.Task<Repro> Stopped(Repro repro) { public async Async.Task<Repro> Stopped(Repro repro) {
_logTracer.Info($"vm stopped: {repro.VmId}"); _logTracer.Info($"vm stopped: {repro.VmId:Tag:VmId}");
// BUG?: why are we updating repro and then deleting it and returning a new value // BUG?: why are we updating repro and then deleting it and returning a new value
repro = repro with { State = VmState.Stopped }; repro = repro with { State = VmState.Stopped };
var r = await Delete(repro); var r = await Delete(repro);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to delete repro {repro.VmId} marked as stopped"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to delete repro {repro.VmId:Tag:VmId} marked as stopped");
} }
return repro; return repro;
} }
@ -161,7 +161,7 @@ public class ReproOperations : StatefulOrm<Repro, VmState, ReproOperations>, IRe
var r = await Replace(repro); var r = await Replace(repro);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace init repro"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace init repro: {repro.VmId:Tag:VmId}");
} }
return repro; return repro;
} }
@ -279,13 +279,13 @@ public class ReproOperations : StatefulOrm<Repro, VmState, ReproOperations>, IRe
); );
} }
_logTracer.Info("saved repro script"); _logTracer.Info($"saved repro script {repro.VmId:Tag:VmId}");
return OneFuzzResultVoid.Ok; return OneFuzzResultVoid.Ok;
} }
public async Async.Task<Repro> SetError(Repro repro, Error result) { public async Async.Task<Repro> SetError(Repro repro, Error result) {
_logTracer.Info( _logTracer.Info(
$"repro failed: vm_id: {repro.VmId} task_id: {repro.TaskId} error: {result}" $"repro failed: {repro.VmId:Tag:VmId} - {repro.TaskId:Tag:TaskId} {result:Tag:Error}"
); );
repro = repro with { repro = repro with {
@ -295,7 +295,7 @@ public class ReproOperations : StatefulOrm<Repro, VmState, ReproOperations>, IRe
var r = await Replace(repro); var r = await Replace(repro);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace repro record for {repro.VmId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace repro record for {repro.VmId:Tag:VmId}");
} }
return repro; return repro;
} }
@ -328,7 +328,7 @@ public class ReproOperations : StatefulOrm<Repro, VmState, ReproOperations>, IRe
var r = await _context.ReproOperations.Insert(vm); var r = await _context.ReproOperations.Insert(vm);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to insert repro record for {vm.VmId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to insert repro record for {vm.VmId:Tag:VmId}");
} }
return OneFuzzResult<Repro>.Ok(vm); return OneFuzzResult<Repro>.Ok(vm);
} else { } else {

View File

@ -20,7 +20,7 @@ public class RequestHandling : IRequestHandling {
public async Async.Task<HttpResponseData> NotOk(HttpRequestData request, Error error, string context, HttpStatusCode statusCode = HttpStatusCode.BadRequest) { public async Async.Task<HttpResponseData> NotOk(HttpRequestData request, Error error, string context, HttpStatusCode statusCode = HttpStatusCode.BadRequest) {
var statusNum = (int)statusCode; var statusNum = (int)statusCode;
if (statusNum >= 400 && statusNum <= 599) { if (statusNum >= 400 && statusNum <= 599) {
_log.Error($"request error - {context}: {error}"); _log.Error($"request error: {context:Tag:Context} - {error:Tag:Error}");
var response = HttpResponseData.CreateResponse(request); var response = HttpResponseData.CreateResponse(request);
await response.WriteAsJsonAsync(error); await response.WriteAsJsonAsync(error);

View File

@ -38,13 +38,11 @@ public interface IScalesetOperations : IStatefulOrm<Scaleset, ScalesetState> {
} }
public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetOperations>, IScalesetOperations { public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetOperations>, IScalesetOperations {
const string SCALESET_LOG_PREFIX = "scalesets: ";
private readonly ILogTracer _log; private readonly ILogTracer _log;
public ScalesetOperations(ILogTracer log, IOnefuzzContext context) public ScalesetOperations(ILogTracer log, IOnefuzzContext context)
: base(log, context) { : base(log.WithTag("Component", "scalesets"), context) {
_log = log; _log = base._logTracer;
} }
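
Editorial note: the hunk above drops the SCALESET_LOG_PREFIX constant in favor of tagging the tracer itself with Component = "scalesets", so the component shows up as a structured dimension rather than a string prefix repeated in every message. A minimal sketch of how such a WithTag could behave is below, assuming an immutable tag dictionary that is copied on each call; this is not the repository's actual ILogTracer implementation, just an illustration of the pattern.

using System.Collections.Generic;

// Hypothetical tag-carrying tracer; member names are assumptions.
public sealed class LogTracer {
    private readonly IReadOnlyDictionary<string, string> _tags;

    public LogTracer() : this(new Dictionary<string, string>()) { }

    private LogTracer(IReadOnlyDictionary<string, string> tags) {
        _tags = tags;
    }

    // Returns a new tracer carrying one extra tag; the original is left
    // untouched, so passing log.WithTag(...) to the base class scopes the
    // Component tag to everything logged through this instance.
    public LogTracer WithTag(string key, string value) {
        var copy = new Dictionary<string, string>(_tags) { [key] = value };
        return new LogTracer(copy);
    }

    public void Info(string message) {
        // forward message plus _tags (e.g. Component=scalesets) to the sink
    }
}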
@ -65,7 +63,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
var size = await _context.VmssOperations.GetVmssSize(scaleset.ScalesetId); var size = await _context.VmssOperations.GetVmssSize(scaleset.ScalesetId);
if (size is null) { if (size is null) {
_log.Info($"{SCALESET_LOG_PREFIX} scaleset is unavailable. scaleset_id: {scaleset.ScalesetId}"); _log.Info($"scaleset is unavailable {scaleset.ScalesetId:Tag:ScalesetId}");
//#if the scaleset is missing, this is an indication the scaleset //#if the scaleset is missing, this is an indication the scaleset
//# was manually deleted, rather than having OneFuzz delete it. As //# was manually deleted, rather than having OneFuzz delete it. As
//# such, we should go through the process of deleting it. //# such, we should go through the process of deleting it.
@ -76,12 +74,12 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
if (size != scaleset.Size) { if (size != scaleset.Size) {
//# Azure auto-scaled us or nodes were manually added/removed //# Azure auto-scaled us or nodes were manually added/removed
//# New node state will be synced in cleanup_nodes //# New node state will be synced in cleanup_nodes
_log.Info($"{SCALESET_LOG_PREFIX} unexpected scaleset size, resizing. scaleset_id: {scaleset.ScalesetId} expected:{scaleset.Size} actual:{size}"); _log.Info($"unexpected scaleset size, resizing {scaleset.ScalesetId:Tag:ScalesetId} {scaleset.Size:Tag:ExpectedSize} {size:Tag:ActualSize}");
scaleset = scaleset with { Size = size.Value }; scaleset = scaleset with { Size = size.Value };
var replaceResult = await Replace(scaleset); var replaceResult = await Replace(scaleset);
if (!replaceResult.IsOk) { if (!replaceResult.IsOk) {
_log.WithHttpStatus(replaceResult.ErrorV).Error($"Failed to update scaleset size for scaleset {scaleset.ScalesetId}"); _log.WithHttpStatus(replaceResult.ErrorV).Error($"failed to update scaleset size for {scaleset.ScalesetId:Tag:ScalesetId}");
} }
} }
@ -92,7 +90,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
if (scaleset.State != ScalesetState.Running) if (scaleset.State != ScalesetState.Running)
return OneFuzzResultVoid.Ok; return OneFuzzResultVoid.Ok;
_log.Info($"syncing auto-scale settings for scaleset {scaleset.ScalesetId}"); _log.Info($"syncing auto-scale settings for scaleset {scaleset.ScalesetId:Tag:ScalesetId}");
var autoscaleProfile = await _context.AutoScaleOperations.GetAutoScaleProfile(scaleset.ScalesetId); var autoscaleProfile = await _context.AutoScaleOperations.GetAutoScaleProfile(scaleset.ScalesetId);
if (!autoscaleProfile.IsOk) { if (!autoscaleProfile.IsOk) {
@ -114,14 +112,14 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
if (scaleAction.Direction == ScaleDirection.Increase) { if (scaleAction.Direction == ScaleDirection.Increase) {
scaleOutAmount = Int32.Parse(scaleAction.Value); scaleOutAmount = Int32.Parse(scaleAction.Value);
_logTracer.Info($"Scaleout cooldown in seconds. Before: {scaleOutCooldown}"); _logTracer.Info($"Scaleout cooldown in seconds. {scaleOutCooldown:Tag:Before}");
scaleOutCooldown = (long)scaleAction.Cooldown.TotalMinutes; scaleOutCooldown = (long)scaleAction.Cooldown.TotalMinutes;
_logTracer.Info($"Scaleout cooldown in seconds. After: {scaleOutCooldown}"); _logTracer.Info($"Scaleout cooldown in seconds. {scaleOutCooldown:Tag:After}");
} else if (scaleAction.Direction == ScaleDirection.Decrease) { } else if (scaleAction.Direction == ScaleDirection.Decrease) {
scaleInAmount = Int32.Parse(scaleAction.Value); scaleInAmount = Int32.Parse(scaleAction.Value);
_logTracer.Info($"Scalin cooldown in seconds. Before: {scaleInCooldown}"); _logTracer.Info($"Scalin cooldown in seconds. {scaleInCooldown:Tag:Before}");
scaleInCooldown = (long)scaleAction.Cooldown.TotalMinutes; scaleInCooldown = (long)scaleAction.Cooldown.TotalMinutes;
_logTracer.Info($"Scalein cooldown in seconds. After: {scaleInCooldown}"); _logTracer.Info($"Scalein cooldown in seconds. {scaleInCooldown:Tag:After}");
} else { } else {
continue; continue;
} }
@ -152,7 +150,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
return scaleset; return scaleset;
} }
_log.Info($"{SCALESET_LOG_PREFIX} scaleset resize: scaleset_id:{scaleset.ScalesetId} size:{scaleset.Size}"); _log.Info($"scaleset resize: {scaleset.ScalesetId:Tag:ScalesetId} - {scaleset.Size:Tag:Size}");
var shrinkQueue = new ShrinkQueue(scaleset.ScalesetId, _context.Queue, _log); var shrinkQueue = new ShrinkQueue(scaleset.ScalesetId, _context.Queue, _log);
// # reset the node delete queue // # reset the node delete queue
@ -164,7 +162,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
// # Treat Azure knowledge of the size of the scaleset as "ground truth" // # Treat Azure knowledge of the size of the scaleset as "ground truth"
var vmssSize = await _context.VmssOperations.GetVmssSize(scaleset.ScalesetId); var vmssSize = await _context.VmssOperations.GetVmssSize(scaleset.ScalesetId);
if (vmssSize is null) { if (vmssSize is null) {
_log.Info($"{SCALESET_LOG_PREFIX} scaleset is unavailable. scaleset_id {scaleset.ScalesetId}"); _log.Info($"scaleset is unavailable {scaleset.ScalesetId:Tag:ScalesetId}");
//#if the scaleset is missing, this is an indication the scaleset //#if the scaleset is missing, this is an indication the scaleset
//# was manually deleted, rather than having OneFuzz delete it. As //# was manually deleted, rather than having OneFuzz delete it. As
@ -193,10 +191,9 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
var updatedScaleSet = scaleset with { State = state }; var updatedScaleSet = scaleset with { State = state };
var r = await Replace(updatedScaleSet); var r = await Replace(updatedScaleSet);
if (!r.IsOk) { if (!r.IsOk) {
var msg = $"Failed to update scaleset {updatedScaleSet.ScalesetId} when updating state from {updatedScaleSet.State} to {state}"; _log.WithHttpStatus(r.ErrorV).Error($"Failed to update scaleset {updatedScaleSet.ScalesetId:Tag:ScalesetId} when updating {updatedScaleSet.State:Tag:StateFrom} - {state:Tag:StateTo}");
_log.WithHttpStatus(r.ErrorV).Error(msg);
// TODO: this should really return OneFuzzResult but then that propagates up the call stack // TODO: this should really return OneFuzzResult but then that propagates up the call stack
throw new Exception(msg); throw new Exception($"Failed to update scaleset {updatedScaleSet.ScalesetId} when updating state from {updatedScaleSet.State} to {state}");
} }
if (state == ScalesetState.Resize) { if (state == ScalesetState.Resize) {
@ -226,20 +223,20 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
public async Async.Task<Scaleset> UpdateConfigs(Scaleset scaleSet) { public async Async.Task<Scaleset> UpdateConfigs(Scaleset scaleSet) {
if (scaleSet.State == ScalesetState.Halt) { if (scaleSet.State == ScalesetState.Halt) {
_log.Info($"{SCALESET_LOG_PREFIX} not updating configs, scalest is set to be deleted. scaleset_id: {scaleSet.ScalesetId}"); _log.Info($"not updating configs, scalest is set to be deleted {scaleSet.ScalesetId:Tag:ScalesetId}");
return scaleSet; return scaleSet;
} }
if (!scaleSet.NeedsConfigUpdate) { if (!scaleSet.NeedsConfigUpdate) {
_log.Verbose($"{SCALESET_LOG_PREFIX} config update no needed. scaleset_id: {scaleSet.ScalesetId}"); _log.Verbose($"config update no needed {scaleSet.ScalesetId:Tag:ScalesetId}");
return scaleSet; return scaleSet;
} }
_log.Info($"{SCALESET_LOG_PREFIX} updating scalset configs. scalset_id: {scaleSet.ScalesetId}"); _log.Info($"updating scalset configs {scaleSet.ScalesetId:Tag:ScalesetId}");
var pool = await _context.PoolOperations.GetByName(scaleSet.PoolName); var pool = await _context.PoolOperations.GetByName(scaleSet.PoolName);
if (!pool.IsOk) { if (!pool.IsOk) {
_log.Error($"{SCALESET_LOG_PREFIX} unable to find pool during config update. pool:{scaleSet.PoolName}, scaleset_id:{scaleSet.ScalesetId}"); _log.Error($"unable to find pool during config update {scaleSet.PoolName:Tag:PoolName} - {scaleSet.ScalesetId:Tag:ScalesetId}");
scaleSet = await SetFailed(scaleSet, pool.ErrorV); scaleSet = await SetFailed(scaleSet, pool.ErrorV);
return scaleSet; return scaleSet;
} }
@ -248,7 +245,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
var res = await _context.VmssOperations.UpdateExtensions(scaleSet.ScalesetId, extensions); var res = await _context.VmssOperations.UpdateExtensions(scaleSet.ScalesetId, extensions);
if (!res.IsOk) { if (!res.IsOk) {
_log.Info($"{SCALESET_LOG_PREFIX} unable to update configs {string.Join(',', res.ErrorV.Errors!)}"); _log.Info($"unable to update configs {string.Join(',', res.ErrorV.Errors!)}");
} }
return scaleSet; return scaleSet;
@ -260,12 +257,12 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
public async Async.Task<Scaleset> Setup(Scaleset scaleset) { public async Async.Task<Scaleset> Setup(Scaleset scaleset) {
//# TODO: How do we pass in SSH configs for Windows? Previously //# TODO: How do we pass in SSH configs for Windows? Previously
//# This was done as part of the generated per-task setup script. //# This was done as part of the generated per-task setup script.
_logTracer.Info($"{SCALESET_LOG_PREFIX} setup. scalset_id: {scaleset.ScalesetId}"); _logTracer.Info($"setup {scaleset.ScalesetId:Tag:ScalesetId}");
var network = await Network.Init(scaleset.Region, _context); var network = await Network.Init(scaleset.Region, _context);
var networkId = await network.GetId(); var networkId = await network.GetId();
if (networkId is null) { if (networkId is null) {
_logTracer.Info($"{SCALESET_LOG_PREFIX} creating network. region: {scaleset.Region} scaleset_id:{scaleset.ScalesetId}"); _logTracer.Info($"creating network {scaleset.Region:Tag:Region} - {scaleset.ScalesetId:Tag:ScalesetId}");
var result = await network.Create(); var result = await network.Create();
if (!result.IsOk) { if (!result.IsOk) {
return await SetFailed(scaleset, result.ErrorV); return await SetFailed(scaleset, result.ErrorV);
@ -274,14 +271,14 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
//TODO : why are we saving scaleset here ? //TODO : why are we saving scaleset here ?
var r = await Update(scaleset); var r = await Update(scaleset);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.Error($"Failed to save scaleset {scaleset.ScalesetId} due to {r.ErrorV}"); _logTracer.Error($"Failed to save scaleset {scaleset.ScalesetId:Tag:ScalesetId} due to {r.ErrorV:Tag:Error}");
} }
return scaleset; return scaleset;
} }
if (scaleset.Auth is null) { if (scaleset.Auth is null) {
_logTracer.Error($"Scaleset Auth is missing for scaleset {scaleset.ScalesetId}"); _logTracer.Error($"Scaleset Auth is missing for scaleset {scaleset.ScalesetId:Tag:ScalesetId}");
return await SetFailed(scaleset, new Error(ErrorCode.UNABLE_TO_CREATE, new[] { "missing required auth" })); return await SetFailed(scaleset, new Error(ErrorCode.UNABLE_TO_CREATE, new[] { "missing required auth" }));
} }
@ -290,11 +287,11 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
if (vmss is null) { if (vmss is null) {
var pool = await _context.PoolOperations.GetByName(scaleset.PoolName); var pool = await _context.PoolOperations.GetByName(scaleset.PoolName);
if (!pool.IsOk) { if (!pool.IsOk) {
_logTracer.Error($"Failed to get pool by name {scaleset.PoolName} for scaleset: {scaleset.ScalesetId}"); _logTracer.Error($"failed to get pool by name {scaleset.PoolName:Tag:PoolName} for scaleset: {scaleset.ScalesetId:Tag:ScalesetId}");
return await SetFailed(scaleset, pool.ErrorV); return await SetFailed(scaleset, pool.ErrorV);
} }
_logTracer.Info($"{SCALESET_LOG_PREFIX} creating scaleset. scaleset_id {scaleset.ScalesetId}"); _logTracer.Info($"creating scaleset {scaleset.ScalesetId:Tag:ScalesetId}");
var extensions = await _context.Extensions.FuzzExtensions(pool.OkV, scaleset); var extensions = await _context.Extensions.FuzzExtensions(pool.OkV, scaleset);
var result = await _context.VmssOperations.CreateVmss( var result = await _context.VmssOperations.CreateVmss(
scaleset.Region, scaleset.Region,
@ -311,32 +308,32 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
scaleset.Tags); scaleset.Tags);
if (!result.IsOk) { if (!result.IsOk) {
_logTracer.Error($"Failed to create scaleset {scaleset.ScalesetId} due to {result.ErrorV}"); _logTracer.Error($"Failed to create scaleset {scaleset.ScalesetId:Tag:ScalesetId} due to {result.ErrorV:Tag:Error}");
return await SetFailed(scaleset, result.ErrorV); return await SetFailed(scaleset, result.ErrorV);
} else { } else {
// TODO: Link up auto scale resource with diagnostics // TODO: Link up auto scale resource with diagnostics
_logTracer.Info($"{SCALESET_LOG_PREFIX} creating scaleset scaleset_id: {scaleset.ScalesetId}"); _logTracer.Info($"creating scaleset: {scaleset.ScalesetId:Tag:ScalesetId}");
} }
} else if (vmss.ProvisioningState == "Creating") { } else if (vmss.ProvisioningState == "Creating") {
var result = TrySetIdentity(scaleset, vmss); var result = TrySetIdentity(scaleset, vmss);
if (!result.IsOk) { if (!result.IsOk) {
_logTracer.Warning($"Could not set identity due to: {result.ErrorV}"); _logTracer.Warning($"Could not set identity due to: {result.ErrorV:Tag:Error} for {scaleset.ScalesetId:Tag:ScalesetId}");
} else { } else {
scaleset = result.OkV; scaleset = result.OkV;
} }
} else { } else {
_logTracer.Info($"{SCALESET_LOG_PREFIX} scaleset running scaleset_id {scaleset.ScalesetId}"); _logTracer.Info($"scaleset running {scaleset.ScalesetId:Tag:ScalesetId}");
var autoScaling = await TryEnableAutoScaling(scaleset); var autoScaling = await TryEnableAutoScaling(scaleset);
if (!autoScaling.IsOk) { if (!autoScaling.IsOk) {
_logTracer.Error($"Failed to set auto-scaling for {scaleset.ScalesetId} due to {autoScaling.ErrorV}"); _logTracer.Error($"failed to set auto-scaling for {scaleset.ScalesetId:Tag:ScalesetId} due to {autoScaling.ErrorV:Tag:Error}");
return await SetFailed(scaleset, autoScaling.ErrorV); return await SetFailed(scaleset, autoScaling.ErrorV);
} }
var result = TrySetIdentity(scaleset, vmss); var result = TrySetIdentity(scaleset, vmss);
if (!result.IsOk) { if (!result.IsOk) {
_logTracer.Error($"Failed to set identity for scaleset {scaleset.ScalesetId} due to: {result.ErrorV}"); _logTracer.Error($"failed to set identity for scaleset {scaleset.ScalesetId:Tag:ScalesetId} due to: {result.ErrorV:Tag:Error}");
return await SetFailed(scaleset, result.ErrorV); return await SetFailed(scaleset, result.ErrorV);
} else { } else {
scaleset = await SetState(scaleset, ScalesetState.Running); scaleset = await SetState(scaleset, ScalesetState.Running);
@ -345,7 +342,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
var rr = await Replace(scaleset); var rr = await Replace(scaleset);
if (!rr.IsOk) { if (!rr.IsOk) {
_logTracer.WithHttpStatus(rr.ErrorV).Error($"Failed to save scale data for scale set: {scaleset.ScalesetId}"); _logTracer.WithHttpStatus(rr.ErrorV).Error($"Failed to save scale data for scale set: {scaleset.ScalesetId:Tag:ScalesetId}");
} }
return scaleset; return scaleset;
@ -372,11 +369,11 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
} }
async Async.Task<OneFuzzResultVoid> TryEnableAutoScaling(Scaleset scaleset) { async Async.Task<OneFuzzResultVoid> TryEnableAutoScaling(Scaleset scaleset) {
_logTracer.Info($"Trying to add auto scaling for scaleset {scaleset.ScalesetId}"); _logTracer.Info($"Trying to add auto scaling for scaleset {scaleset.ScalesetId:Tag:ScalesetId}");
var r = await _context.PoolOperations.GetByName(scaleset.PoolName); var r = await _context.PoolOperations.GetByName(scaleset.PoolName);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.Error($"Failed to get pool by name: {scaleset.PoolName} error: {r.ErrorV}"); _logTracer.Error($"Failed to get pool by name: {scaleset.PoolName:Tag:PoolName} - {r.ErrorV:Tag:Error}");
return r.ErrorV; return r.ErrorV;
} }
var pool = r.OkV; var pool = r.OkV;
@ -387,7 +384,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
if (!capacity.HasValue) { if (!capacity.HasValue) {
var capacityFailed = OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_FIND, $"Failed to get capacity for scaleset {scaleset.ScalesetId}"); var capacityFailed = OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_FIND, $"Failed to get capacity for scaleset {scaleset.ScalesetId}");
_logTracer.Error(capacityFailed.ErrorV.ToString()); _logTracer.Error($"Failed to get capacity for scaleset {scaleset.ScalesetId:Tag:ScalesetId}");
return capacityFailed; return capacityFailed;
} }
@ -395,7 +392,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
if (poolQueueUri is null) { if (poolQueueUri is null) {
var failedToFindQueueUri = OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_FIND, $"Failed to get pool queue uri for scaleset {scaleset.ScalesetId}"); var failedToFindQueueUri = OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_FIND, $"Failed to get pool queue uri for scaleset {scaleset.ScalesetId}");
_logTracer.Error(failedToFindQueueUri.ErrorV.ToString()); _logTracer.Error($"Failed to get pool queue uri for scaleset {scaleset.ScalesetId:Tag:ScalesetId}");
return failedToFindQueueUri; return failedToFindQueueUri;
} }
@ -403,7 +400,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
if (autoScaleConfig is null) { if (autoScaleConfig is null) {
autoScaleProfile = _context.AutoScaleOperations.DefaultAutoScaleProfile(poolQueueUri!, capacity.Value); autoScaleProfile = _context.AutoScaleOperations.DefaultAutoScaleProfile(poolQueueUri!, capacity.Value);
} else { } else {
_logTracer.Info("Using existing auto scale settings from database"); _logTracer.Info($"Using existing auto scale settings from database for scaleset {scaleset.ScalesetId:Tag:ScalesetId}");
autoScaleProfile = _context.AutoScaleOperations.CreateAutoScaleProfile( autoScaleProfile = _context.AutoScaleOperations.CreateAutoScaleProfile(
queueUri: poolQueueUri!, queueUri: poolQueueUri!,
minAmount: autoScaleConfig.Min, minAmount: autoScaleConfig.Min,
@ -417,13 +414,13 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
} }
_logTracer.Info($"Added auto scale resource to scaleset: {scaleset.ScalesetId}"); _logTracer.Info($"Added auto scale resource to scaleset: {scaleset.ScalesetId:Tag:ScalesetId}");
return await _context.AutoScaleOperations.AddAutoScaleToVmss(scaleset.ScalesetId, autoScaleProfile); return await _context.AutoScaleOperations.AddAutoScaleToVmss(scaleset.ScalesetId, autoScaleProfile);
} }
public async Async.Task<Scaleset> Init(Scaleset scaleset) { public async Async.Task<Scaleset> Init(Scaleset scaleset) {
_logTracer.Info($"{SCALESET_LOG_PREFIX} init. scaleset_id:{scaleset.ScalesetId}"); _logTracer.Info($"init {scaleset.ScalesetId:Tag:ScalesetId}");
var shrinkQueue = new ShrinkQueue(scaleset.ScalesetId, _context.Queue, _logTracer); var shrinkQueue = new ShrinkQueue(scaleset.ScalesetId, _context.Queue, _logTracer);
await shrinkQueue.Create(); await shrinkQueue.Create();
// Handle the race condition between a pool being deleted and a // Handle the race condition between a pool being deleted and a
@ -431,21 +428,21 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
var poolResult = await _context.PoolOperations.GetByName(scaleset.PoolName); var poolResult = await _context.PoolOperations.GetByName(scaleset.PoolName);
if (!poolResult.IsOk) { if (!poolResult.IsOk) {
_logTracer.Error($"Failed to get pool by name {scaleset.PoolName} for scaleset: {scaleset.ScalesetId} due to {poolResult.ErrorV}"); _logTracer.Error($"failed to get pool by name {scaleset.PoolName:Tag:PoolName} for scaleset: {scaleset.ScalesetId:Tag:ScalesetId} due to {poolResult.ErrorV:Tag:Error}");
return await SetFailed(scaleset, poolResult.ErrorV); return await SetFailed(scaleset, poolResult.ErrorV);
} }
var pool = poolResult.OkV; var pool = poolResult.OkV;
if (pool.State == PoolState.Init) { if (pool.State == PoolState.Init) {
_logTracer.Info($"{SCALESET_LOG_PREFIX} waiting for pool. pool_name:{scaleset.PoolName} scaleset_id:{scaleset.ScalesetId}"); _logTracer.Info($"waiting for pool {scaleset.PoolName:Tag:PoolName} - {scaleset.ScalesetId:Tag:ScalesetId}");
} else if (pool.State == PoolState.Running) { } else if (pool.State == PoolState.Running) {
var imageOsResult = await _context.ImageOperations.GetOs(scaleset.Region, scaleset.Image); var imageOsResult = await _context.ImageOperations.GetOs(scaleset.Region, scaleset.Image);
if (!imageOsResult.IsOk) { if (!imageOsResult.IsOk) {
_logTracer.Error($"Failed to get OS with region: {scaleset.Region} image:{scaleset.Image} for scaleset: {scaleset.ScalesetId} due to {imageOsResult.ErrorV}"); _logTracer.Error($"failed to get OS with region: {scaleset.Region:Tag:Region} {scaleset.Image:Tag:Image} for scaleset: {scaleset.ScalesetId:Tag:ScalesetId} due to {imageOsResult.ErrorV:Tag:Error}");
return await SetFailed(scaleset, imageOsResult.ErrorV); return await SetFailed(scaleset, imageOsResult.ErrorV);
} else if (imageOsResult.OkV != pool.Os) { } else if (imageOsResult.OkV != pool.Os) {
_logTracer.Error($"Got invalid OS: {imageOsResult.OkV} for scaleset: {scaleset.ScalesetId} expected OS {pool.Os}"); _logTracer.Error($"got invalid OS: {imageOsResult.OkV:Tag:ActualOs} for scaleset: {scaleset.ScalesetId:Tag:ScalesetId} expected OS {pool.Os:Tag:ExpectedOs}");
return await SetFailed(scaleset, new Error(ErrorCode.INVALID_REQUEST, new[] { $"invalid os (got: {imageOsResult.OkV} needed: {pool.Os})" })); return await SetFailed(scaleset, new Error(ErrorCode.INVALID_REQUEST, new[] { $"invalid os (got: {imageOsResult.OkV} needed: {pool.Os})" }));
} else { } else {
return await SetState(scaleset, ScalesetState.Setup); return await SetState(scaleset, ScalesetState.Setup);
@ -462,21 +459,21 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
await shrinkQueue.Delete(); await shrinkQueue.Delete();
await foreach (var node in _context.NodeOperations.SearchStates(scalesetId: scaleset.ScalesetId)) { await foreach (var node in _context.NodeOperations.SearchStates(scalesetId: scaleset.ScalesetId)) {
_log.Info($"{SCALESET_LOG_PREFIX} deleting node scaleset_id {scaleset.ScalesetId} machine_id {node.MachineId}"); _log.Info($"deleting node {scaleset.ScalesetId:Tag:ScalesetId} - {node.MachineId:Tag:MachineId}");
await _context.NodeOperations.Delete(node); await _context.NodeOperations.Delete(node);
} }
_log.Info($"{SCALESET_LOG_PREFIX} scaleset delete starting: scaleset_id:{scaleset.ScalesetId}"); _log.Info($"scaleset delete starting - {scaleset.ScalesetId:Tag:ScalesetId}");
if (await _context.VmssOperations.DeleteVmss(scaleset.ScalesetId)) { if (await _context.VmssOperations.DeleteVmss(scaleset.ScalesetId)) {
_log.Info($"{SCALESET_LOG_PREFIX}scaleset deleted: scaleset_id {scaleset.ScalesetId}"); _log.Info($"scaleset deleted: {scaleset.ScalesetId:Tag:ScalesetId}");
var r = await Delete(scaleset); var r = await Delete(scaleset);
if (!r.IsOk) { if (!r.IsOk) {
_log.WithHttpStatus(r.ErrorV).Error($"Failed to delete scaleset record {scaleset.ScalesetId}"); _log.WithHttpStatus(r.ErrorV).Error($"Failed to delete scaleset record {scaleset.ScalesetId:Tag:ScalesetId}");
} }
} else { } else {
var r = await Update(scaleset); var r = await Update(scaleset);
if (!r.IsOk) { if (!r.IsOk) {
_log.WithHttpStatus(r.ErrorV).Error($"Failed to save scaleset record {scaleset.ScalesetId}"); _log.WithHttpStatus(r.ErrorV).Error($"Failed to save scaleset record {scaleset.ScalesetId:Tag:ScalesetId}");
} }
} }
@ -489,17 +486,17 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
/// <param name="scaleSet"></param> /// <param name="scaleSet"></param>
/// <returns>true if scaleset got modified</returns> /// <returns>true if scaleset got modified</returns>
public async Async.Task<(bool, Scaleset)> CleanupNodes(Scaleset scaleSet) { public async Async.Task<(bool, Scaleset)> CleanupNodes(Scaleset scaleSet) {
_log.Info($"{SCALESET_LOG_PREFIX} cleaning up nodes. scaleset_id {scaleSet.ScalesetId}"); _log.Info($"cleaning up nodes {scaleSet.ScalesetId:Tag:ScalesetId}");
if (scaleSet.State == ScalesetState.Halt) { if (scaleSet.State == ScalesetState.Halt) {
_log.Info($"{SCALESET_LOG_PREFIX} halting scaleset scaleset_id {scaleSet.ScalesetId}"); _log.Info($"halting scaleset {scaleSet.ScalesetId:Tag:ScalesetId}");
scaleSet = await Halt(scaleSet); scaleSet = await Halt(scaleSet);
return (true, scaleSet); return (true, scaleSet);
} }
var pool = await _context.PoolOperations.GetByName(scaleSet.PoolName); var pool = await _context.PoolOperations.GetByName(scaleSet.PoolName);
if (!pool.IsOk) { if (!pool.IsOk) {
_log.Error($"unable to find pool during cleanup {scaleSet.ScalesetId} - {scaleSet.PoolName}"); _log.Error($"unable to find pool during cleanup {scaleSet.ScalesetId:Tag:ScalesetId} - {scaleSet.PoolName:Tag:PoolName}");
scaleSet = await SetFailed(scaleSet, pool.ErrorV!); scaleSet = await SetFailed(scaleSet, pool.ErrorV!);
return (true, scaleSet); return (true, scaleSet);
} }
@ -513,7 +510,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
//# Nodes do not exists in scalesets but in table due to unknown failure //# Nodes do not exists in scalesets but in table due to unknown failure
await foreach (var node in nodes) { await foreach (var node in nodes) {
if (!azureNodes.ContainsKey(node.MachineId)) { if (!azureNodes.ContainsKey(node.MachineId)) {
_log.Info($"{SCALESET_LOG_PREFIX} no longer in scaleset. scaleset_id:{scaleSet.ScalesetId} machine_id:{node.MachineId}"); _log.Info($"{node.MachineId:Tag:MachineId} no longer in scaleset {scaleSet.ScalesetId:Tag:ScalesetId}");
await _context.NodeOperations.Delete(node); await _context.NodeOperations.Delete(node);
} }
} }
@ -534,7 +531,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
if (nodeMachineIds.Contains(machineId)) { if (nodeMachineIds.Contains(machineId)) {
continue; continue;
} }
_log.Info($"{SCALESET_LOG_PREFIX} adding missing azure node. scaleset_id:{scaleSet.ScalesetId} machine_id:{machineId}"); _log.Info($"adding missing azure node {machineId:Tag:MachineId} to scaleset {scaleSet.ScalesetId:Tag:ScalesetId}");
//# Note, using `new=True` makes it such that if a node already has //# Note, using `new=True` makes it such that if a node already has
//# checked in, this won't overwrite it. //# checked in, this won't overwrite it.
@ -604,18 +601,18 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
public async Async.Task ReimageNodes(Scaleset scaleset, IEnumerable<Node> nodes, NodeDisposalStrategy disposalStrategy) { public async Async.Task ReimageNodes(Scaleset scaleset, IEnumerable<Node> nodes, NodeDisposalStrategy disposalStrategy) {
if (nodes is null || !nodes.Any()) { if (nodes is null || !nodes.Any()) {
_log.Info($"{SCALESET_LOG_PREFIX} no nodes to reimage: scaleset_id: {scaleset.ScalesetId}"); _log.Info($"no nodes to reimage: {scaleset.ScalesetId:Tag:ScalesetId}");
return; return;
} }
if (scaleset.State == ScalesetState.Shutdown) { if (scaleset.State == ScalesetState.Shutdown) {
_log.Info($"{SCALESET_LOG_PREFIX} scaleset shutting down, deleting rather than reimaging nodes. scaleset_id: {scaleset.ScalesetId}"); _log.Info($"scaleset shutting down, deleting rather than reimaging nodes {scaleset.ScalesetId:Tag:ScalesetId}");
await DeleteNodes(scaleset, nodes, disposalStrategy); await DeleteNodes(scaleset, nodes, disposalStrategy);
return; return;
} }
if (scaleset.State == ScalesetState.Halt) { if (scaleset.State == ScalesetState.Halt) {
_log.Info($"{SCALESET_LOG_PREFIX} scaleset halting, ignoring node reimage: scaleset_id:{scaleset.ScalesetId}"); _log.Info($"scaleset halting, ignoring node reimage {scaleset.ScalesetId:Tag:ScalesetId}");
return; return;
} }
@ -626,20 +623,20 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
} }
if (node.DebugKeepNode) { if (node.DebugKeepNode) {
_log.Warning($"{SCALESET_LOG_PREFIX} not reimaging manually overriden node. scaleset_id:{scaleset.ScalesetId} machine_id:{node.MachineId}"); _log.Warning($"not reimaging manually overriden node {node.MachineId:Tag:MachineId} in scaleset {scaleset.ScalesetId:Tag:ScalesetId}");
} else { } else {
_ = machineIds.Add(node.MachineId); _ = machineIds.Add(node.MachineId);
} }
} }
if (!machineIds.Any()) { if (!machineIds.Any()) {
_log.Info($"{SCALESET_LOG_PREFIX} no nodes to reimage: {scaleset.ScalesetId}"); _log.Info($"no nodes to reimage {scaleset.ScalesetId:Tag:ScalesetId}");
return; return;
} }
switch (disposalStrategy) { switch (disposalStrategy) {
case NodeDisposalStrategy.Decommission: case NodeDisposalStrategy.Decommission:
_log.Info($"{SCALESET_LOG_PREFIX} decommissioning nodes"); _log.Info($"decommissioning nodes");
await Async.Task.WhenAll(nodes await Async.Task.WhenAll(nodes
.Where(node => machineIds.Contains(node.MachineId)) .Where(node => machineIds.Contains(node.MachineId))
.Select(async node => { .Select(async node => {
@ -657,7 +654,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
await _context.NodeOperations.ReleaseScaleInProtection(node).IgnoreResult(); await _context.NodeOperations.ReleaseScaleInProtection(node).IgnoreResult();
})); }));
} else { } else {
_log.Info($"failed to reimage nodes due to {r.ErrorV}"); _log.Info($"failed to reimage nodes due to {r.ErrorV:Tag:Error}");
} }
return; return;
} }
@ -666,7 +663,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
public async Async.Task DeleteNodes(Scaleset scaleset, IEnumerable<Node> nodes, NodeDisposalStrategy disposalStrategy) { public async Async.Task DeleteNodes(Scaleset scaleset, IEnumerable<Node> nodes, NodeDisposalStrategy disposalStrategy) {
if (nodes is null || !nodes.Any()) { if (nodes is null || !nodes.Any()) {
_log.Info($"{SCALESET_LOG_PREFIX} no nodes to delete: scaleset_id: {scaleset.ScalesetId}"); _log.Info($"no nodes to delete: scaleset_id: {scaleset.ScalesetId:Tag:ScalesetId}");
return; return;
} }
@ -674,14 +671,14 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
nodes = await Async.Task.WhenAll(nodes.Select(node => _context.NodeOperations.SetHalt(node))); nodes = await Async.Task.WhenAll(nodes.Select(node => _context.NodeOperations.SetHalt(node)));
if (scaleset.State == ScalesetState.Halt) { if (scaleset.State == ScalesetState.Halt) {
_log.Info($"{SCALESET_LOG_PREFIX} scaleset halting, ignoring deletion {scaleset.ScalesetId}"); _log.Info($"scaleset halting, ignoring deletion {scaleset.ScalesetId:Tag:ScalesetId}");
return; return;
} }
HashSet<Guid> machineIds = new(); HashSet<Guid> machineIds = new();
foreach (var node in nodes) { foreach (var node in nodes) {
if (node.DebugKeepNode) { if (node.DebugKeepNode) {
_log.Warning($"{SCALESET_LOG_PREFIX} not deleting manually overriden node. scaleset_id:{scaleset.ScalesetId} machine_id:{node.MachineId}"); _log.Warning($"not deleting manually overriden node {node.MachineId:Tag:MachineId} in scaleset {scaleset.ScalesetId:Tag:ScalesetId}");
} else { } else {
_ = machineIds.Add(node.MachineId); _ = machineIds.Add(node.MachineId);
} }
@ -689,7 +686,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
switch (disposalStrategy) { switch (disposalStrategy) {
case NodeDisposalStrategy.Decommission: case NodeDisposalStrategy.Decommission:
_log.Info($"{SCALESET_LOG_PREFIX} decommissioning nodes"); _log.Info($"decommissioning nodes");
await Async.Task.WhenAll(nodes await Async.Task.WhenAll(nodes
.Where(node => machineIds.Contains(node.MachineId)) .Where(node => machineIds.Contains(node.MachineId))
.Select(async node => { .Select(async node => {
@ -698,7 +695,7 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
return; return;
case NodeDisposalStrategy.ScaleIn: case NodeDisposalStrategy.ScaleIn:
_log.Info($"{SCALESET_LOG_PREFIX} deleting nodes scaleset_id: {scaleset.ScalesetId} machine_id: {string.Join(", ", machineIds)}"); _log.Info($"deleting nodes {scaleset.ScalesetId:Tag:ScalesetId} {string.Join(", ", machineIds):Tag:MachineIds}");
await _context.VmssOperations.DeleteNodes(scaleset.ScalesetId, machineIds); await _context.VmssOperations.DeleteNodes(scaleset.ScalesetId, machineIds);
await Async.Task.WhenAll(nodes await Async.Task.WhenAll(nodes
.Where(node => machineIds.Contains(node.MachineId)) .Where(node => machineIds.Contains(node.MachineId))
@ -742,10 +739,10 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
var nodeCount = await _context.NodeOperations.SearchStates(scalesetId: scaleset.ScalesetId).CountAsync(); var nodeCount = await _context.NodeOperations.SearchStates(scalesetId: scaleset.ScalesetId).CountAsync();
if (nodeCount == scaleset.Size) { if (nodeCount == scaleset.Size) {
_log.Info($"{SCALESET_LOG_PREFIX} resize finished: {scaleset.ScalesetId}"); _log.Info($"resize finished {scaleset.ScalesetId:Tag:ScalesetId}");
return await SetState(scaleset, ScalesetState.Running); return await SetState(scaleset, ScalesetState.Running);
} else { } else {
_log.Info($"{SCALESET_LOG_PREFIX} resize finished, waiting for nodes to check in. scaleset_id: {scaleset.ScalesetId} ({nodeCount} of {scaleset.Size} checked in)"); _log.Info($"resize finished, waiting for nodes to check in {scaleset.ScalesetId:Tag:ScalesetId} ({nodeCount:Tag:NodeCount} of {scaleset.Size:Tag:Size} checked in)");
return scaleset; return scaleset;
} }
} }
@ -753,13 +750,13 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
private async Async.Task<Scaleset> ResizeGrow(Scaleset scaleset) { private async Async.Task<Scaleset> ResizeGrow(Scaleset scaleset) {
var resizeResult = await _context.VmssOperations.ResizeVmss(scaleset.ScalesetId, scaleset.Size); var resizeResult = await _context.VmssOperations.ResizeVmss(scaleset.ScalesetId, scaleset.Size);
if (resizeResult.IsOk == false) { if (resizeResult.IsOk == false) {
_log.Info($"{SCALESET_LOG_PREFIX} scaleset is mid-operation already scaleset_id: {scaleset.ScalesetId} message: {resizeResult.ErrorV}"); _log.Info($"scaleset is mid-operation already {scaleset.ScalesetId:Tag:ScalesetId} {resizeResult.ErrorV:Tag:Error}");
} }
return scaleset; return scaleset;
} }
private async Async.Task<Scaleset> ResizeShrink(Scaleset scaleset, long? toRemove) { private async Async.Task<Scaleset> ResizeShrink(Scaleset scaleset, long? toRemove) {
_log.Info($"{SCALESET_LOG_PREFIX} shrinking scaleset. scaleset_id: {scaleset.ScalesetId} to remove {toRemove}"); _log.Info($"shrinking scaleset {scaleset.ScalesetId:Tag:ScalesetId} {toRemove:Tag:ToRemove}");
if (!toRemove.HasValue) { if (!toRemove.HasValue) {
return scaleset; return scaleset;
@ -812,18 +809,18 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
public async Async.Task<Scaleset> Shutdown(Scaleset scaleset) { public async Async.Task<Scaleset> Shutdown(Scaleset scaleset) {
var size = await _context.VmssOperations.GetVmssSize(scaleset.ScalesetId); var size = await _context.VmssOperations.GetVmssSize(scaleset.ScalesetId);
if (size == null) { if (size == null) {
_logTracer.Info($"{SCALESET_LOG_PREFIX} scale set shutdown: scaleset already deleted - scaleset_id:{scaleset.ScalesetId}"); _logTracer.Info($"scale set shutdown: scaleset already deleted {scaleset.ScalesetId:Tag:ScalesetId}");
return await Halt(scaleset); return await Halt(scaleset);
} }
_logTracer.Info($"{SCALESET_LOG_PREFIX} scaleset shutdown: scaleset_id:{scaleset.ScalesetId} size:{size}"); _logTracer.Info($"scaleset shutdown {scaleset.ScalesetId:Tag:ScalesetId} {size:Tag:Size}");
{ {
var nodes = _context.NodeOperations.SearchStates(scalesetId: scaleset.ScalesetId); var nodes = _context.NodeOperations.SearchStates(scalesetId: scaleset.ScalesetId);
// TODO: Parallelization opportunity // TODO: Parallelization opportunity
await nodes.ForEachAwaitAsync(_context.NodeOperations.SetShutdown); await nodes.ForEachAwaitAsync(_context.NodeOperations.SetShutdown);
} }
_logTracer.Info($"{SCALESET_LOG_PREFIX} checking for existing auto scale settings {scaleset.ScalesetId}"); _logTracer.Info($"checking for existing auto scale settings {scaleset.ScalesetId:Tag:ScalesetId}");
var autoScalePolicy = _context.AutoScaleOperations.GetAutoscaleSettings(scaleset.ScalesetId); var autoScalePolicy = _context.AutoScaleOperations.GetAutoscaleSettings(scaleset.ScalesetId);
if (autoScalePolicy.IsOk && autoScalePolicy.OkV != null) { if (autoScalePolicy.IsOk && autoScalePolicy.OkV != null) {
@ -851,30 +848,30 @@ public class ScalesetOperations : StatefulOrm<Scaleset, ScalesetState, ScalesetO
// auto scale will scale in the remaining nodes, // auto scale will scale in the remaining nodes,
// the scale set will have 0 instances, // the scale set will have 0 instances,
// and once the scale set is empty, we will delete it. // and once the scale set is empty, we will delete it.
_logTracer.Info($"{SCALESET_LOG_PREFIX} Getting nodes with scale in protection"); _logTracer.Info($"Getting nodes with scale in protection");
var vmsWithProtection = await _context.VmssOperations.ListVmss( var vmsWithProtection = await _context.VmssOperations.ListVmss(
scaleset.ScalesetId, scaleset.ScalesetId,
(vmResource) => vmResource.Data.ProtectionPolicy.ProtectFromScaleIn.HasValue && vmResource.Data.ProtectionPolicy.ProtectFromScaleIn.Value (vmResource) => vmResource.Data.ProtectionPolicy.ProtectFromScaleIn.HasValue && vmResource.Data.ProtectionPolicy.ProtectFromScaleIn.Value
); );
_logTracer.Info($"{SCALESET_LOG_PREFIX} {JsonSerializer.Serialize(vmsWithProtection)}"); _logTracer.Info($"{JsonSerializer.Serialize(vmsWithProtection):Tag:VMsWithProtection}");
if (vmsWithProtection != null && vmsWithProtection.Any()) { if (vmsWithProtection != null && vmsWithProtection.Any()) {
var numVmsWithProtection = vmsWithProtection.Count; var numVmsWithProtection = vmsWithProtection.Count;
profile.Capacity.Minimum = numVmsWithProtection.ToString(); profile.Capacity.Minimum = numVmsWithProtection.ToString();
profile.Capacity.Default = numVmsWithProtection.ToString(); profile.Capacity.Default = numVmsWithProtection.ToString();
} else { } else {
_logTracer.Error($"Failed to list vmss for scaleset {scaleset.ScalesetId}"); _logTracer.Error($"Failed to list vmss for scaleset {scaleset.ScalesetId:Tag:ScalesetId}");
} }
} }
var updatedAutoScale = await _context.AutoScaleOperations.UpdateAutoscale(autoScalePolicy.OkV.Data); var updatedAutoScale = await _context.AutoScaleOperations.UpdateAutoscale(autoScalePolicy.OkV.Data);
if (!updatedAutoScale.IsOk) { if (!updatedAutoScale.IsOk) {
_logTracer.Error($"Failed to update auto scale {updatedAutoScale}"); _logTracer.Error($"Failed to update auto scale {updatedAutoScale.ErrorV:Tag:Error}");
} }
} else if (!autoScalePolicy.IsOk) { } else if (!autoScalePolicy.IsOk) {
_logTracer.Error(autoScalePolicy.ErrorV); _logTracer.Error(autoScalePolicy.ErrorV);
} else { } else {
_logTracer.Info($"No existing auto scale settings found for {scaleset.ScalesetId}"); _logTracer.Info($"No existing auto scale settings found for {scaleset.ScalesetId:Tag:ScalesetId}");
} }
if (size == 0) { if (size == 0) {

View File

@ -53,19 +53,19 @@ public class Scheduler : IScheduler {
var notReadyCount = tasks.Count - seen.Count; var notReadyCount = tasks.Count - seen.Count;
if (notReadyCount > 0) { if (notReadyCount > 0) {
_logTracer.Info($"tasks not ready {notReadyCount}. Tasks seen: {seen.Count}"); _logTracer.Info($"{notReadyCount:Tag:TasksNotReady} - {seen.Count:Tag:TasksSeen}");
} }
} }
private async Async.Task<bool> ScheduleWorkset(WorkSet workSet, Pool pool, long count) { private async Async.Task<bool> ScheduleWorkset(WorkSet workSet, Pool pool, long count) {
if (!PoolStateHelper.Available.Contains(pool.State)) { if (!PoolStateHelper.Available.Contains(pool.State)) {
_logTracer.Info($"pool not available for work: {pool.Name} state: {pool.State}"); _logTracer.Info($"pool not available {pool.Name:Tag:PoolName} - {pool.State:Tag:PoolState}");
return false; return false;
} }
for (var i = 0L; i < count; i++) { for (var i = 0L; i < count; i++) {
if (!await _poolOperations.ScheduleWorkset(pool, workSet)) { if (!await _poolOperations.ScheduleWorkset(pool, workSet)) {
_logTracer.Error($"unable to schedule workset. pool:{pool.Name} workset: {workSet}"); _logTracer.Error($"unable to schedule workset {pool.Name:Tag:PoolName} {workSet:Tag:WorkSet}");
return false; return false;
} }
} }
@ -150,24 +150,24 @@ public class Scheduler : IScheduler {
if (!poolCache.TryGetValue(poolKey, out var pool)) { if (!poolCache.TryGetValue(poolKey, out var pool)) {
var foundPool = await _taskOperations.GetPool(task); var foundPool = await _taskOperations.GetPool(task);
if (foundPool is null) { if (foundPool is null) {
_logTracer.Info($"unable to find pool for task: {task.TaskId}"); _logTracer.Info($"unable to find pool for task: {task.TaskId:Tag:TaskId}");
return OneFuzzResult<(BucketConfig, WorkUnit)>.Error(ErrorCode.UNABLE_TO_FIND, $"unable to find pool for the task {task.TaskId} in job {task.JobId}"); return OneFuzzResult<(BucketConfig, WorkUnit)>.Error(ErrorCode.UNABLE_TO_FIND, $"unable to find pool for the task {task.TaskId} in job {task.JobId}");
} }
pool = poolCache[poolKey] = foundPool; pool = poolCache[poolKey] = foundPool;
} }
_logTracer.Info($"scheduling task: {task.TaskId}"); _logTracer.Info($"scheduling task: {task.TaskId:Tag:TaskId}");
var job = await _jobOperations.Get(task.JobId); var job = await _jobOperations.Get(task.JobId);
if (job is null) { if (job is null) {
_logTracer.Error($"invalid job_id {task.JobId} for task {task.TaskId}"); _logTracer.Error($"invalid job {task.JobId:Tag:JobId} for task {task.TaskId:Tag:TaskId}");
return OneFuzzResult<(BucketConfig, WorkUnit)>.Error(ErrorCode.INVALID_JOB, $"invalid job_id {task.JobId} for task {task.TaskId}"); return OneFuzzResult<(BucketConfig, WorkUnit)>.Error(ErrorCode.INVALID_JOB, $"invalid job_id {task.JobId} for task {task.TaskId}");
} }
var taskConfig = await _config.BuildTaskConfig(job, task); var taskConfig = await _config.BuildTaskConfig(job, task);
if (taskConfig is null) { if (taskConfig is null) {
_logTracer.Error($"unable to build task config for task: {task.TaskId}"); _logTracer.Error($"unable to build task config for task: {task.TaskId:Tag:TaskId}");
return OneFuzzResult<(BucketConfig, WorkUnit)>.Error(ErrorCode.INVALID_CONFIGURATION, $"unable to build task config for task: {task.TaskId} in job {task.JobId}"); return OneFuzzResult<(BucketConfig, WorkUnit)>.Error(ErrorCode.INVALID_CONFIGURATION, $"unable to build task config for task: {task.TaskId} in job {task.JobId}");
} }
var setupContainer = task.Config.Containers?.FirstOrDefault(c => c.Type == ContainerType.Setup) ?? throw new Exception($"task missing setup container: task_type = {task.Config.Task.Type}"); var setupContainer = task.Config.Containers?.FirstOrDefault(c => c.Type == ContainerType.Setup) ?? throw new Exception($"task missing setup container: task_type = {task.Config.Task.Type}");

View File

@ -30,7 +30,7 @@ public class Subnet : ISubnet {
} }
public async Task<OneFuzzResultVoid> CreateVirtualNetwork(string resourceGroup, string name, Region region, NetworkConfig networkConfig) { public async Task<OneFuzzResultVoid> CreateVirtualNetwork(string resourceGroup, string name, Region region, NetworkConfig networkConfig) {
_logTracer.Info($"creating subnet - resource group:{resourceGroup} name:{name} region: {region}"); _logTracer.Info($"creating subnet - {resourceGroup:Tag:ResourceGroup} {name:Tag:Name} {region:Tag:Region}");
var virtualNetParam = new VirtualNetworkData { var virtualNetParam = new VirtualNetworkData {
Location = region, Location = region,
@ -46,7 +46,7 @@ public class Subnet : ISubnet {
var onefuzzOwner = _context.ServiceConfiguration.OneFuzzOwner; var onefuzzOwner = _context.ServiceConfiguration.OneFuzzOwner;
if (!string.IsNullOrEmpty(onefuzzOwner)) { if (!string.IsNullOrEmpty(onefuzzOwner)) {
if (!virtualNetParam.Tags.TryAdd("OWNER", onefuzzOwner)) { if (!virtualNetParam.Tags.TryAdd("OWNER", onefuzzOwner)) {
_logTracer.Warning($"Failed to add tag 'OWNER':{onefuzzOwner} to virtual network {resourceGroup}:{name}"); _logTracer.Warning($"Failed to add tag 'OWNER':{onefuzzOwner:Tag:Owner} to virtual network {resourceGroup:Tag:ResourceGroup}:{name:Tag:Name}");
} }
} }
@ -56,7 +56,7 @@ public class Subnet : ISubnet {
name, name,
virtualNetParam); virtualNetParam);
} catch (RequestFailedException ex) { } catch (RequestFailedException ex) {
_logTracer.Error($"network creation failed: {name}:{region} {{error}}"); _logTracer.Error($"network creation failed: {name:Tag:Name}:{region:Tag:Region} {ex.Message:Tag:Error}");
return OneFuzzResultVoid.Error( return OneFuzzResultVoid.Error(
ErrorCode.UNABLE_TO_CREATE_NETWORK, ErrorCode.UNABLE_TO_CREATE_NETWORK,
ex.ToString() ex.ToString()

View File

@ -19,7 +19,7 @@ public sealed class TaskEventOperations : Orm<TaskEvent>, ITaskEventOperations {
private static string GetEventData(WorkerEvent ev) { private static string GetEventData(WorkerEvent ev) {
return ev.Done != null ? $"exit status: {ev.Done.ExitStatus}" : return ev.Done != null ? $"exit status: {ev.Done.ExitStatus}" :
ev.Running != null ? string.Empty : "Unrecognized event: {ev}"; ev.Running != null ? string.Empty : $"Unrecognized event: {ev}";
} }
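
Editorial note: the one-character change above is behavior-changing. The old expression lacked the "$" interpolation prefix, so the braces were literal text and the event value never reached the message. A standalone illustration (not repo code):

using System;

var ev = "Done";
Console.WriteLine("Unrecognized event: {ev}");   // prints: Unrecognized event: {ev}
Console.WriteLine($"Unrecognized event: {ev}");  // prints: Unrecognized event: Done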
private static string GetEventType(WorkerEvent ev) { private static string GetEventType(WorkerEvent ev) {

View File

@ -100,7 +100,7 @@ public class TaskOperations : StatefulOrm<Task, TaskState, TaskOperations>, ITas
public async Async.Task MarkStopping(Task task) { public async Async.Task MarkStopping(Task task) {
if (task.State.ShuttingDown()) { if (task.State.ShuttingDown()) {
_logTracer.Verbose($"ignoring post - task stop calls to stop {task.JobId}:{task.TaskId}"); _logTracer.Verbose($"ignoring post - task stop calls to stop {task.JobId:Tag:JobId}:{task.TaskId:Tag:TaskId}");
return; return;
} }
@ -120,7 +120,7 @@ public class TaskOperations : StatefulOrm<Task, TaskState, TaskOperations>, ITas
return; return;
} }
_logTracer.Info($"task failed {task.JobId}:{task.TaskId} - {error}"); _logTracer.Info($"task failed {task.JobId:Tag:JobId}:{task.TaskId:Tag:TaskId} - {error:Tag:Error}");
task = await SetState(task with { Error = error }, TaskState.Stopping); task = await SetState(task with { Error = error }, TaskState.Stopping);
await MarkDependantsFailed(task, taskInJob); await MarkDependantsFailed(task, taskInJob);
@ -151,7 +151,7 @@ public class TaskOperations : StatefulOrm<Task, TaskState, TaskOperations>, ITas
var r = await Replace(task); var r = await Replace(task);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to replace task with jobid: {task.JobId} and taskid: {task.TaskId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to replace task for job {task.JobId:Tag:JobId} - {task.TaskId:Tag:TaskId}");
} }
var _events = _context.Events; var _events = _context.Events;
@ -209,11 +209,11 @@ public class TaskOperations : StatefulOrm<Task, TaskState, TaskOperations>, ITas
var r = await _context.TaskOperations.Insert(task); var r = await _context.TaskOperations.Insert(task);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"failed to insert task {task.TaskId}"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to insert task {task.TaskId:Tag:TaskId}");
} }
await _context.Events.SendEvent(new EventTaskCreated(jobId, task.TaskId, config, userInfo)); await _context.Events.SendEvent(new EventTaskCreated(jobId, task.TaskId, config, userInfo));
_logTracer.Info($"created task. job_id:{jobId} task_id:{task.TaskId} type:{task.Config.Task.Type}"); _logTracer.Info($"created task {jobId:Tag:JobId} {task.TaskId:Tag:TaskId} {task.Config.Task.Type:Tag:TaskType}");
return OneFuzzResult<Task>.Ok(task); return OneFuzzResult<Task>.Ok(task);
} }
@ -243,14 +243,14 @@ public class TaskOperations : StatefulOrm<Task, TaskState, TaskOperations>, ITas
var pool = await _context.PoolOperations.GetByName(task.Config.Pool.PoolName); var pool = await _context.PoolOperations.GetByName(task.Config.Pool.PoolName);
if (!pool.IsOk) { if (!pool.IsOk) {
_logTracer.Info($"unable to find pool from task: {task.TaskId}"); _logTracer.Info($"unable to find pool from task: {task.TaskId:Tag:TaskId}");
return null; return null;
} }
var scaleset = await _context.ScalesetOperations.SearchByPool(task.Config.Pool.PoolName).FirstOrDefaultAsync(); var scaleset = await _context.ScalesetOperations.SearchByPool(task.Config.Pool.PoolName).FirstOrDefaultAsync();
if (scaleset == null) { if (scaleset == null) {
_logTracer.Warning($"no scalesets are defined for task: {task.JobId}:{task.TaskId}"); _logTracer.Warning($"no scalesets are defined for task: {task.JobId:Tag:JobId} - {task.TaskId:Tag:TaskId}");
return null; return null;
} }
@ -269,7 +269,7 @@ public class TaskOperations : StatefulOrm<Task, TaskState, TaskOperations>, ITas
} }
if (t.JobId != task.JobId) { if (t.JobId != task.JobId) {
_logTracer.Critical("Tasks are not from the same job"); _logTracer.Critical($"Tasks with same id {taskId:Tag:TaskId} are not from the same job {t.JobId:Tag:NewTaskJobId} - {task.JobId:Tag:CurrentTaskJobId}");
} }
if (!t.State.HasStarted()) { if (!t.State.HasStarted()) {
@ -288,7 +288,7 @@ public class TaskOperations : StatefulOrm<Task, TaskState, TaskOperations>, ITas
var pool = await _context.PoolOperations.GetByName(p.PoolName); var pool = await _context.PoolOperations.GetByName(p.PoolName);
if (!pool.IsOk) { if (!pool.IsOk) {
_logTracer.Info( _logTracer.Info(
$"unable to schedule task to pool [{task.Config.Pool.PoolName}]: {task.TaskId} - {pool.ErrorV}" $"unable to schedule task to pool [{task.Config.Pool.PoolName:Tag:PoolName}]: {task.TaskId:Tag:TaskId} - {pool.ErrorV:Tag:Error}"
); );
return null; return null;
} }
@ -302,7 +302,7 @@ public class TaskOperations : StatefulOrm<Task, TaskState, TaskOperations>, ITas
var pool = await _context.PoolOperations.GetByName(scaleset.PoolName); var pool = await _context.PoolOperations.GetByName(scaleset.PoolName);
if (!pool.IsOk) { if (!pool.IsOk) {
_logTracer.Info( _logTracer.Info(
$"unable to schedule task to pool [{scaleset.PoolName}]: {task.TaskId} - {pool.ErrorV}" $"unable to schedule task to pool [{scaleset.PoolName:Tag:PoolName}]: {task.TaskId:Tag:TaskId} - {pool.ErrorV:Tag:Error}"
); );
return null; return null;
} }
@ -311,7 +311,7 @@ public class TaskOperations : StatefulOrm<Task, TaskState, TaskOperations>, ITas
} }
} }
_logTracer.Warning($"unable to find a scaleset that matches the task prereqs: {task.TaskId}"); _logTracer.Warning($"unable to find a scaleset that matches the task prereqs: {task.TaskId:Tag:TaskId}");
return null; return null;
} }
@ -322,7 +322,7 @@ public class TaskOperations : StatefulOrm<Task, TaskState, TaskOperations>, ITas
public async Async.Task<Task> Stopping(Task task) { public async Async.Task<Task> Stopping(Task task) {
_logTracer.Info($"stopping task : {task.JobId}, {task.TaskId}"); _logTracer.Info($"stopping task : {task.JobId:Tag:JobId} - {task.TaskId:Tag:TaskId}");
await _context.NodeOperations.StopTask(task.TaskId); await _context.NodeOperations.StopTask(task.TaskId);
var anyRemainingNodes = await _context.NodeTasksOperations.GetNodesByTaskId(task.TaskId).AnyAsync(); var anyRemainingNodes = await _context.NodeTasksOperations.GetNodesByTaskId(task.TaskId).AnyAsync();
if (!anyRemainingNodes) { if (!anyRemainingNodes) {


@ -89,16 +89,16 @@ public class VmOperations : IVmOperations {
public async Async.Task<bool> DeleteVmComponents(string name, Nsg? nsg) { public async Async.Task<bool> DeleteVmComponents(string name, Nsg? nsg) {
var resourceGroup = _context.Creds.GetBaseResourceGroup(); var resourceGroup = _context.Creds.GetBaseResourceGroup();
_logTracer.Info($"deleting vm components {resourceGroup}:{name}"); _logTracer.Info($"deleting vm components {resourceGroup:Tag:ResourceGroup}:{name:Tag:VmName}");
if (await GetVm(name) != null) { if (await GetVm(name) != null) {
_logTracer.Info($"deleting vm {resourceGroup}:{name}"); _logTracer.Info($"deleting vm {resourceGroup:Tag:ResourceGroup}:{name:Tag:VmName}");
await DeleteVm(name); await DeleteVm(name);
return false; return false;
} }
var nic = await _context.IpOperations.GetPublicNic(resourceGroup, name); var nic = await _context.IpOperations.GetPublicNic(resourceGroup, name);
if (nic != null) { if (nic != null) {
_logTracer.Info($"deleting nic {resourceGroup}:{name}"); _logTracer.Info($"deleting nic {resourceGroup:Tag:ResourceGroup}:{name:Tag:VmName}");
if (nic.Data.NetworkSecurityGroup != null && nsg != null) { if (nic.Data.NetworkSecurityGroup != null && nsg != null) {
_ = await _context.NsgOperations.DissociateNic(nsg, nic); _ = await _context.NsgOperations.DissociateNic(nsg, nic);
return false; return false;
@ -108,7 +108,7 @@ public class VmOperations : IVmOperations {
} }
if (await _context.IpOperations.GetIp(resourceGroup, name) != null) { if (await _context.IpOperations.GetIp(resourceGroup, name) != null) {
_logTracer.Info($"deleting ip {resourceGroup}:{name}"); _logTracer.Info($"deleting ip {resourceGroup:Tag:ResourceGroup}:{name:Tag:VmName}");
await _context.IpOperations.DeleteIp(resourceGroup, name); await _context.IpOperations.DeleteIp(resourceGroup, name);
return false; return false;
} }
@ -119,7 +119,7 @@ public class VmOperations : IVmOperations {
if (await disks.AnyAsync()) { if (await disks.AnyAsync()) {
await foreach (var disk in disks) { await foreach (var disk in disks) {
_logTracer.Info($"deleting disk {resourceGroup}:{disk?.Data.Name}"); _logTracer.Info($"deleting disk {resourceGroup:Tag:ResourceGroup}:{disk?.Data.Name:Tag:DiskName}");
_ = await _context.DiskOperations.DeleteDisk(resourceGroup, disk?.Data.Name!); _ = await _context.DiskOperations.DeleteDisk(resourceGroup, disk?.Data.Name!);
} }
return false; return false;
@ -129,13 +129,13 @@ public class VmOperations : IVmOperations {
} }
public async System.Threading.Tasks.Task DeleteVm(string name) { public async System.Threading.Tasks.Task DeleteVm(string name) {
_logTracer.Info($"deleting vm: {_context.Creds.GetBaseResourceGroup()} {name}"); _logTracer.Info($"deleting vm: {_context.Creds.GetBaseResourceGroup():Tag:ResourceGroup} {name:Tag:VmName}");
var r = await _context.Creds.GetResourceGroupResource() var r = await _context.Creds.GetResourceGroupResource()
.GetVirtualMachineAsync(name).Result.Value .GetVirtualMachineAsync(name).Result.Value
.DeleteAsync(WaitUntil.Started); .DeleteAsync(WaitUntil.Started);
if (r.GetRawResponse().IsError) { if (r.GetRawResponse().IsError) {
_logTracer.Error($"failed to start deletion of vm {name} due to {r.GetRawResponse().ReasonPhrase}"); _logTracer.Error($"failed to start deletion of vm {name:Tag:VmName} due to {r.GetRawResponse().ReasonPhrase:Tag:Error}");
} }
return; return;
} }
@ -151,7 +151,7 @@ public class VmOperations : IVmOperations {
var extension = await GetExtension(vm.Name, extensionName); var extension = await GetExtension(vm.Name, extensionName);
if (extension != null) { if (extension != null) {
_logTracer.Info( _logTracer.Info(
$"vm extension state: {vm.Name} - {extensionName} - {extension.ProvisioningState}" $"vm extension state: {vm.Name:Tag:VmName} - {extensionName:Tag:ExtensionName} - {extension.ProvisioningState:Tag:ExtensionProvisioningState}"
); );
status.Add(extension.ProvisioningState); status.Add(extension.ProvisioningState);
} else { } else {
@ -172,7 +172,7 @@ public class VmOperations : IVmOperations {
"failed to launch extension" "failed to launch extension"
); );
} else if (!(status.Contains("Creating") || status.Contains("Updating"))) { } else if (!(status.Contains("Creating") || status.Contains("Updating"))) {
_logTracer.Error($"vm agent - unknown state {vm.Name}: {JsonConvert.SerializeObject(status)}"); _logTracer.Error($"vm agent - unknown state {vm.Name:Tag:VmName}: {JsonConvert.SerializeObject(status):Tag:Status}");
} }
} }
@ -184,7 +184,7 @@ public class VmOperations : IVmOperations {
return OneFuzzResultVoid.Ok; return OneFuzzResultVoid.Ok;
} }
_logTracer.Info($"vm creating: {vm.Name}"); _logTracer.Info($"vm creating: {vm.Name:Tag:VmName}");
return await CreateVm( return await CreateVm(
vm.Name, vm.Name,
@ -207,13 +207,13 @@ public class VmOperations : IVmOperations {
return (await vm.Value.GetVirtualMachineExtensionAsync(extensionName)).Value.Data; return (await vm.Value.GetVirtualMachineExtensionAsync(extensionName)).Value.Data;
} catch (RequestFailedException ex) { } catch (RequestFailedException ex) {
_logTracer.Info($"extension does not exist {ex}"); _logTracer.Info($"extension does not exist {ex.Message:Tag:Error}");
return null; return null;
} }
} }
public async Async.Task CreateExtension(string vmName, string extensionName, VirtualMachineExtensionData extension) { public async Async.Task CreateExtension(string vmName, string extensionName, VirtualMachineExtensionData extension) {
_logTracer.Info($"creating extension: {_context.Creds.GetBaseResourceGroup()}:{vmName}:{extensionName}"); _logTracer.Info($"creating extension: {_context.Creds.GetBaseResourceGroup():Tag:ResourceGroup} - {vmName:Tag:VmName} - {extensionName:Tag:ExtensionName}");
var vm = await _context.Creds.GetResourceGroupResource().GetVirtualMachineAsync(vmName); var vm = await _context.Creds.GetResourceGroupResource().GetVirtualMachineAsync(vmName);
try { try {
@ -225,7 +225,7 @@ public class VmOperations : IVmOperations {
} catch (RequestFailedException ex) when } catch (RequestFailedException ex) when
(ex.Status == 409 && (ex.Status == 409 &&
(ex.Message.Contains("VM is marked for deletion") || ex.Message.Contains("The request failed due to conflict with a concurrent request."))) { (ex.Message.Contains("VM is marked for deletion") || ex.Message.Contains("The request failed due to conflict with a concurrent request."))) {
_logTracer.Info($"Tried to create extension but failed due to {ex.Message}"); _logTracer.Info($"Tried to create {extensionName:Tag:ExtensionName} for {vmName:Tag:VmName} but failed due to {ex.Message:Tag:Error}");
} }
return; return;
} }
@ -241,7 +241,7 @@ public class VmOperations : IVmOperations {
IDictionary<string, string>? tags IDictionary<string, string>? tags
) { ) {
var resourceGroup = _context.Creds.GetBaseResourceGroup(); var resourceGroup = _context.Creds.GetBaseResourceGroup();
_logTracer.Info($"creating vm {resourceGroup}:{location}:{name}"); _logTracer.Info($"creating vm {resourceGroup:Tag:ResourceGroup} - {location:Tag:Location} {name:Tag:VmName}");
var nic = await _context.IpOperations.GetPublicNic(resourceGroup, name); var nic = await _context.IpOperations.GetPublicNic(resourceGroup, name);
if (nic == null) { if (nic == null) {
@ -250,7 +250,7 @@ public class VmOperations : IVmOperations {
return result; return result;
} }
_logTracer.Info("waiting on nic creation"); _logTracer.Info($"waiting on nic creation for {name:Tag:VmName}");
return OneFuzzResultVoid.Ok; return OneFuzzResultVoid.Ok;
} }
@ -312,7 +312,7 @@ public class VmOperations : IVmOperations {
tags?.ToList() tags?.ToList()
.ForEach(kvp => { .ForEach(kvp => {
if (!vmParams.Tags.TryAdd(kvp.Key, kvp.Value)) { if (!vmParams.Tags.TryAdd(kvp.Key, kvp.Value)) {
_logTracer.Warning($"Failed to add tag {kvp.Key}:{kvp.Value} to vm {name}"); _logTracer.Warning($"Failed to add tag {kvp.Key:Tag:Key}:{kvp.Value:Tag:Value} to vm {name:Tag:VmName}");
} }
}); });
} }


@ -53,7 +53,7 @@ public class VmssOperations : IVmssOperations {
private readonly IMemoryCache _cache; private readonly IMemoryCache _cache;
public VmssOperations(ILogTracer log, IOnefuzzContext context, IMemoryCache cache) { public VmssOperations(ILogTracer log, IOnefuzzContext context, IMemoryCache cache) {
_log = log; _log = log.WithTag("Component", "vmss-operations");
_creds = context.Creds; _creds = context.Creds;
_imageOps = context.ImageOperations; _imageOps = context.ImageOperations;
_serviceConfig = context.ServiceConfiguration; _serviceConfig = context.ServiceConfiguration;
@ -65,7 +65,7 @@ public class VmssOperations : IVmssOperations {
var result = await r.DeleteAsync(WaitUntil.Started, forceDeletion: forceDeletion); var result = await r.DeleteAsync(WaitUntil.Started, forceDeletion: forceDeletion);
var raw = result.GetRawResponse(); var raw = result.GetRawResponse();
if (raw.IsError) { if (raw.IsError) {
_log.WithHttpStatus((raw.Status, raw.ReasonPhrase)).Error($"Failed to delete vmss: {name}"); _log.WithHttpStatus((raw.Status, raw.ReasonPhrase)).Error($"Failed to delete vmss: {name:Tag:VmssName}");
return false; return false;
} else { } else {
return true; return true;
@ -83,7 +83,7 @@ public class VmssOperations : IVmssOperations {
public async Async.Task<OneFuzzResultVoid> ResizeVmss(Guid name, long capacity) { public async Async.Task<OneFuzzResultVoid> ResizeVmss(Guid name, long capacity) {
var canUpdate = await CheckCanUpdate(name); var canUpdate = await CheckCanUpdate(name);
if (canUpdate.IsOk) { if (canUpdate.IsOk) {
_log.Info($"updating VM count - name: {name} vm_count: {capacity}"); _log.Info($"updating VM count {name:Tag:VmssName} - {capacity:Tag:Count}");
var scalesetResource = GetVmssResource(name); var scalesetResource = GetVmssResource(name);
var patch = new VirtualMachineScaleSetPatch(); var patch = new VirtualMachineScaleSetPatch();
patch.Sku.Capacity = capacity; patch.Sku.Capacity = capacity;
@ -105,7 +105,7 @@ public class VmssOperations : IVmssOperations {
public async Async.Task<VirtualMachineScaleSetData?> GetVmss(Guid name) { public async Async.Task<VirtualMachineScaleSetData?> GetVmss(Guid name) {
try { try {
var res = await GetVmssResource(name).GetAsync(); var res = await GetVmssResource(name).GetAsync();
_log.Verbose($"getting vmss: {name}"); _log.Verbose($"getting vmss: {name:Tag:VmssName}");
return res.Value.Data; return res.Value.Data;
} catch (RequestFailedException ex) when (ex.Status == 404) { } catch (RequestFailedException ex) when (ex.Status == 404) {
return null; return null;
@ -127,7 +127,7 @@ public class VmssOperations : IVmssOperations {
public async Async.Task<OneFuzzResultVoid> UpdateExtensions(Guid name, IList<VirtualMachineScaleSetExtensionData> extensions) { public async Async.Task<OneFuzzResultVoid> UpdateExtensions(Guid name, IList<VirtualMachineScaleSetExtensionData> extensions) {
var canUpdate = await CheckCanUpdate(name); var canUpdate = await CheckCanUpdate(name);
if (canUpdate.IsOk) { if (canUpdate.IsOk) {
_log.Info($"updating VM extensions: {name}"); _log.Info($"updating VM extensions: {name:Tag:VmssName}");
var res = GetVmssResource(name); var res = GetVmssResource(name);
var patch = new VirtualMachineScaleSetPatch() { var patch = new VirtualMachineScaleSetPatch() {
VirtualMachineProfile = VirtualMachineProfile =
@ -138,7 +138,7 @@ public class VmssOperations : IVmssOperations {
patch.VirtualMachineProfile.ExtensionProfile.Extensions.Add(ext); patch.VirtualMachineProfile.ExtensionProfile.Extensions.Add(ext);
} }
_ = await res.UpdateAsync(WaitUntil.Started, patch); _ = await res.UpdateAsync(WaitUntil.Started, patch);
_log.Info($"VM extensions updated: {name}"); _log.Info($"VM extensions updated: {name:Tag:VmssName}");
return OneFuzzResultVoid.Ok; return OneFuzzResultVoid.Ok;
} else { } else {
@ -147,19 +147,19 @@ public class VmssOperations : IVmssOperations {
} }
public async Async.Task<IDictionary<Guid, string>> ListInstanceIds(Guid name) { public async Async.Task<IDictionary<Guid, string>> ListInstanceIds(Guid name) {
_log.Verbose($"get instance IDs for scaleset {name}"); _log.Verbose($"get instance IDs for scaleset {name:Tag:VmssName}");
var results = new Dictionary<Guid, string>(); var results = new Dictionary<Guid, string>();
VirtualMachineScaleSetResource res; VirtualMachineScaleSetResource res;
try { try {
var r = await GetVmssResource(name).GetAsync(); var r = await GetVmssResource(name).GetAsync();
res = r.Value; res = r.Value;
} catch (Exception ex) when (ex is RequestFailedException) { } catch (Exception ex) when (ex is RequestFailedException) {
_log.Verbose($"vm does not exist {name}"); _log.Verbose($"vm does not exist {name:Tag:VmssName}");
return results; return results;
} }
if (res is null) { if (res is null) {
_log.Verbose($"vm does not exist {name}"); _log.Verbose($"vm does not exist {name:Tag:VmssName}");
return results; return results;
} else { } else {
try { try {
@ -169,19 +169,19 @@ public class VmssOperations : IVmssOperations {
if (Guid.TryParse(instance.Data.VmId, out key)) { if (Guid.TryParse(instance.Data.VmId, out key)) {
results[key] = instance.Data.InstanceId; results[key] = instance.Data.InstanceId;
} else { } else {
_log.Error($"failed to convert vmId {instance.Data.VmId} to Guid"); _log.Error($"failed to convert vmId {instance.Data.VmId:Tag:VmId} to Guid in {name:Tag:VmssName}");
} }
} }
} }
} catch (Exception ex) when (ex is RequestFailedException || ex is CloudException) { } catch (Exception ex) when (ex is RequestFailedException || ex is CloudException) {
_log.Exception(ex, $"vm does not exist {name}"); _log.Exception(ex, $"vm does not exist {name:Tag:VmssName}");
} }
} }
return results; return results;
} }
public async Async.Task<OneFuzzResult<VirtualMachineScaleSetVmResource>> GetInstanceVm(Guid name, Guid vmId) { public async Async.Task<OneFuzzResult<VirtualMachineScaleSetVmResource>> GetInstanceVm(Guid name, Guid vmId) {
_log.Info($"get instance ID for scaleset node: {name}:{vmId}"); _log.Info($"get instance ID for scaleset node: {name:Tag:VmssName}:{vmId:Tag:VmId}");
var scaleSet = GetVmssResource(name); var scaleSet = GetVmssResource(name);
try { try {
@ -196,7 +196,7 @@ public class VmssOperations : IVmssOperations {
} }
} }
} catch (Exception ex) when (ex is RequestFailedException || ex is CloudException) { } catch (Exception ex) when (ex is RequestFailedException || ex is CloudException) {
_log.Exception(ex, $"unable to find vm instance: {name}:{vmId}"); _log.Exception(ex, $"unable to find vm instance: {name:Tag:VmssName}:{vmId:Tag:VmId}");
return OneFuzzResult<VirtualMachineScaleSetVmResource>.Error(ErrorCode.UNABLE_TO_FIND, $"unable to find vm instance: {name}:{vmId}"); return OneFuzzResult<VirtualMachineScaleSetVmResource>.Error(ErrorCode.UNABLE_TO_FIND, $"unable to find vm instance: {name}:{vmId}");
} }
return OneFuzzResult<VirtualMachineScaleSetVmResource>.Error(ErrorCode.UNABLE_TO_FIND, $"unable to find scaleset machine: {name}:{vmId}"); return OneFuzzResult<VirtualMachineScaleSetVmResource>.Error(ErrorCode.UNABLE_TO_FIND, $"unable to find scaleset machine: {name}:{vmId}");
@ -225,11 +225,11 @@ public class VmssOperations : IVmssOperations {
_ = await vmCollection.CreateOrUpdateAsync(WaitUntil.Started, instanceVm.Data.InstanceId, instanceVm.Data); _ = await vmCollection.CreateOrUpdateAsync(WaitUntil.Started, instanceVm.Data.InstanceId, instanceVm.Data);
return OneFuzzResultVoid.Ok; return OneFuzzResultVoid.Ok;
} catch { } catch {
var msg = $"unable to set protection policy on: {vmId}:{instanceVm.Id}"; var msg = $"unable to set protection policy on: {vmId}:{instanceVm.Id} in vmss {name}";
return OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_UPDATE, msg); return OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_UPDATE, msg);
} }
} else { } else {
_log.Info($"scale in protection was already set to {protectFromScaleIn} on vm {vmId} for scaleset {name}"); _log.Info($"scale in protection was already set to {protectFromScaleIn:Tag:ProtectFromScaleIn} on vm {vmId:Tag:VmId} for scaleset {name:Tag:VmssName}");
return OneFuzzResultVoid.Ok; return OneFuzzResultVoid.Ok;
} }
} }
@ -252,7 +252,7 @@ public class VmssOperations : IVmssOperations {
if (vmss is not null) { if (vmss is not null) {
return OneFuzzResultVoid.Ok; return OneFuzzResultVoid.Ok;
} }
_log.Info($"creating VM name: {name}, vm_sku: {vmSku}, vm_count: {vmCount}, image: {image}, subnet: {networkId}, spot_instance: {spotInstance}"); _log.Info($"creating VM name: {name:Tag:VmssName} {vmSku:Tag:VmSku} {vmCount:Tag:VmCount} {image:Tag:Image} {networkId:Tag:Subnet} {spotInstance:Tag:SpotInstance}");
var getOsResult = await _imageOps.GetOs(location, image); var getOsResult = await _imageOps.GetOs(location, image);
if (!getOsResult.IsOk) { if (!getOsResult.IsOk) {
@ -369,9 +369,8 @@ public class VmssOperations : IVmssOperations {
var rg = _creds.GetResourceGroupResource(); var rg = _creds.GetResourceGroupResource();
var createUpdate = await rg.GetVirtualMachineScaleSets().CreateOrUpdateAsync(WaitUntil.Started, name.ToString(), vmssData); var createUpdate = await rg.GetVirtualMachineScaleSets().CreateOrUpdateAsync(WaitUntil.Started, name.ToString(), vmssData);
if (createUpdate.GetRawResponse().IsError) { if (createUpdate.GetRawResponse().IsError) {
var msg = $"Failed to create new scaleset due to {createUpdate.GetRawResponse().ReasonPhrase}"; _log.Error($"Failed to create new scaleset due to {createUpdate.GetRawResponse().ReasonPhrase:Tag:Error}");
_log.Error(msg); return OneFuzzResultVoid.Error(ErrorCode.VM_CREATE_FAILED, new[] { $"Failed to create new scaleset due to {createUpdate.GetRawResponse().ReasonPhrase}" });
return OneFuzzResultVoid.Error(ErrorCode.VM_CREATE_FAILED, new[] { msg });
} else { } else {
return OneFuzzResultVoid.Ok; return OneFuzzResultVoid.Ok;
} }
@ -389,7 +388,7 @@ public class VmssOperations : IVmssOperations {
.Select(vm => vm.Data.InstanceId) .Select(vm => vm.Data.InstanceId)
.ToList(); .ToList();
} catch (RequestFailedException ex) { } catch (RequestFailedException ex) {
_log.Exception(ex, $"cloud error listing vmss: {name}"); _log.Exception(ex, $"cloud error listing vmss: {name:Tag:VmssName}");
} }
return null; return null;
} }
@ -434,7 +433,7 @@ public class VmssOperations : IVmssOperations {
if (machineToInstance.TryGetValue(machineId, out var instanceId)) { if (machineToInstance.TryGetValue(machineId, out var instanceId)) {
_ = instanceIds.Add(instanceId); _ = instanceIds.Add(instanceId);
} else { } else {
_log.Info($"unable to find instance ID for {scalesetId}:{machineId}"); _log.Info($"unable to find instance ID for {scalesetId:Tag:ScalesetId} - {machineId:Tag:MachineId}");
} }
} }
@ -454,15 +453,15 @@ public class VmssOperations : IVmssOperations {
// the instance is up-to-date with the VMSS model. // the instance is up-to-date with the VMSS model.
// The expectation is that these requests are queued and handled subsequently. // The expectation is that these requests are queued and handled subsequently.
// The VMSS Team confirmed this expectation and testing supports it, as well. // The VMSS Team confirmed this expectation and testing supports it, as well.
_log.Info($"upgrading VMSS ndoes - name: {scalesetId} ids: {string.Join(", ", instanceIds)}"); _log.Info($"upgrading VMSS nodes - name: {scalesetId:Tag:ScalesetId} ids: {string.Join(", ", instanceIds):Tag:InstanceIds}");
var r = await vmssResource.UpdateInstancesAsync( var r = await vmssResource.UpdateInstancesAsync(
WaitUntil.Started, WaitUntil.Started,
new VirtualMachineScaleSetVmInstanceRequiredIds(instanceIds)); new VirtualMachineScaleSetVmInstanceRequiredIds(instanceIds));
if (r.GetRawResponse().IsError) { if (r.GetRawResponse().IsError) {
_log.Error($"failed to start update instance for scaleset {scalesetId} due to {r.GetRawResponse().ReasonPhrase}"); _log.Error($"failed to start update instance for scaleset {scalesetId:Tag:ScalesetId} due to {r.GetRawResponse().ReasonPhrase:Tag:Error}");
} }
_log.Info($"reimaging VMSS nodes - name: {scalesetId} ids: {string.Join(", ", instanceIds)}"); _log.Info($"reimaging VMSS nodes: {scalesetId:Tag:ScalesetId} - {string.Join(", ", instanceIds):Tag:InstanceIds}");
// very weird API here… // very weird API here…
var reqInstanceIds = new VirtualMachineScaleSetVmInstanceIds(); var reqInstanceIds = new VirtualMachineScaleSetVmInstanceIds();
@ -472,7 +471,7 @@ public class VmssOperations : IVmssOperations {
r = await vmssResource.ReimageAllAsync(WaitUntil.Started, reqInstanceIds); r = await vmssResource.ReimageAllAsync(WaitUntil.Started, reqInstanceIds);
if (r.GetRawResponse().IsError) { if (r.GetRawResponse().IsError) {
_log.Error($"failed to start reimage all for scaleset {scalesetId} due to {r.GetRawResponse().ReasonPhrase}"); _log.Error($"failed to start reimage all for scaleset {scalesetId:Tag:ScalesetId} due to {r.GetRawResponse().ReasonPhrase:Tag:Error}");
} }
return OneFuzzResultVoid.Ok; return OneFuzzResultVoid.Ok;
} }
@ -480,7 +479,7 @@ public class VmssOperations : IVmssOperations {
public async Async.Task DeleteNodes(Guid scalesetId, IReadOnlySet<Guid> machineIds) { public async Async.Task DeleteNodes(Guid scalesetId, IReadOnlySet<Guid> machineIds) {
var result = await CheckCanUpdate(scalesetId); var result = await CheckCanUpdate(scalesetId);
if (!result.IsOk) { if (!result.IsOk) {
throw new Exception($"cannot delete nodes from scaleset {scalesetId}: {result.ErrorV}"); throw new Exception($"cannot delete nodes from scaleset {scalesetId:Tag:ScalesetId}: {result.ErrorV:Tag:Error}");
} }
var instanceIds = new HashSet<string>(); var instanceIds = new HashSet<string>();
@ -489,7 +488,7 @@ public class VmssOperations : IVmssOperations {
if (machineToInstance.TryGetValue(machineId, out var instanceId)) { if (machineToInstance.TryGetValue(machineId, out var instanceId)) {
_ = instanceIds.Add(instanceId); _ = instanceIds.Add(instanceId);
} else { } else {
_log.Info($"unable to find instance ID for {scalesetId}:{machineId}"); _log.Info($"unable to find instance ID for {scalesetId:Tag:ScalesetId} - {machineId:Tag:VmId}");
} }
} }
@ -505,13 +504,13 @@ public class VmssOperations : IVmssOperations {
var computeClient = _creds.ArmClient; var computeClient = _creds.ArmClient;
var vmssResource = computeClient.GetVirtualMachineScaleSetResource(vmssId); var vmssResource = computeClient.GetVirtualMachineScaleSetResource(vmssId);
_log.Info($"deleting scaleset VMs - name: {scalesetId} ids: {instanceIds}"); _log.Info($"deleting scaleset VMs - name: {scalesetId:Tag:ScalesetId} - {instanceIds:Tag:InstanceIds}");
var r = await vmssResource.DeleteInstancesAsync( var r = await vmssResource.DeleteInstancesAsync(
WaitUntil.Started, WaitUntil.Started,
new VirtualMachineScaleSetVmInstanceRequiredIds(instanceIds)); new VirtualMachineScaleSetVmInstanceRequiredIds(instanceIds));
if (r.GetRawResponse().IsError) { if (r.GetRawResponse().IsError) {
_log.Error($"failed to start deletion of scaleset {scalesetId} due to {r.GetRawResponse().ReasonPhrase}"); _log.Error($"failed to start deletion of scaleset {scalesetId:Tag:ScalesetId} due to {r.GetRawResponse().ReasonPhrase:Tag:Error}");
} }
return; return;
} }


@ -46,7 +46,7 @@ public class WebhookOperations : Orm<Webhook>, IWebhookOperations {
var r = await _context.WebhookMessageLogOperations.Replace(message); var r = await _context.WebhookMessageLogOperations.Replace(message);
if (!r.IsOk) { if (!r.IsOk) {
_logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to replace webhook message log"); _logTracer.WithHttpStatus(r.ErrorV).Error($"Failed to replace webhook message log {webhook.WebhookId:Tag:WebhookId} - {eventMessage.EventId:Tag:EventId}");
} }
await _context.WebhookMessageLogOperations.QueueWebhook(message); await _context.WebhookMessageLogOperations.QueueWebhook(message);
} }
@ -67,7 +67,7 @@ public class WebhookOperations : Orm<Webhook>, IWebhookOperations {
using var httpClient = _httpFactory.CreateClient(); using var httpClient = _httpFactory.CreateClient();
var client = new Request(httpClient); var client = new Request(httpClient);
_logTracer.Info(data); _logTracer.Info($"{messageLog.WebhookId:Tag:WebhookId} - {data}");
using var response = await client.Post(url: webhook.Url, json: data, headers: headers); using var response = await client.Post(url: webhook.Url, json: data, headers: headers);
if (response.StatusCode == HttpStatusCode.Accepted) { if (response.StatusCode == HttpStatusCode.Accepted) {
return true; return true;
@ -147,17 +147,11 @@ public class WebhookMessageLogOperations : Orm<WebhookMessageLog>, IWebhookMessa
_ => null _ => null
}; };
var log = _logTracer.WithTags(
new[] {
("WebhookId", webhookLog.WebhookId.ToString()),
("EventId", webhookLog.EventId.ToString()) }
);
if (visibilityTimeout == null) { if (visibilityTimeout == null) {
log.Error($"invalid WebhookMessage queue state, not queuing. {webhookLog.WebhookId}:{webhookLog.EventId} - {webhookLog.State}"); _logTracer.Error($"invalid WebhookMessage queue state, not queuing. {webhookLog.WebhookId:Tag:WebhookId}:{webhookLog.EventId:Tag:EventId} - {webhookLog.State:Tag:State}");
} else { } else {
if (!await _context.Queue.QueueObject("webhooks", obj, StorageType.Config, visibilityTimeout: visibilityTimeout)) { if (!await _context.Queue.QueueObject("webhooks", obj, StorageType.Config, visibilityTimeout: visibilityTimeout)) {
log.Warning("failed to queue object"); _logTracer.Warning($"failed to queue object {webhookLog.WebhookId:Tag:WebhookId}:{webhookLog.EventId:Tag:EventId}");
} }
} }
} }
@ -166,12 +160,7 @@ public class WebhookMessageLogOperations : Orm<WebhookMessageLog>, IWebhookMessa
var message = await GetWebhookMessageById(obj.WebhookId, obj.EventId); var message = await GetWebhookMessageById(obj.WebhookId, obj.EventId);
if (message == null) { if (message == null) {
_logTracer.WithTags( _logTracer.Error($"webhook message log not found for webhookId: {obj.WebhookId:Tag:WebhookId} and eventId: {obj.EventId:Tag:EventId}");
new[] {
("WebhookId", obj.WebhookId.ToString()),
("EventId", obj.EventId.ToString()) }
).
Error($"webhook message log not found for webhookId: {obj.WebhookId} and eventId: {obj.EventId}");
} else { } else {
await Process(message); await Process(message);
} }
@ -179,43 +168,37 @@ public class WebhookMessageLogOperations : Orm<WebhookMessageLog>, IWebhookMessa
private async Async.Task Process(WebhookMessageLog message) { private async Async.Task Process(WebhookMessageLog message) {
var log = _logTracer.WithTags(
new[] {
("WebhookId", message.WebhookId.ToString()),
("EventId", message.EventId.ToString()) }
);
if (message.State == WebhookMessageState.Failed || message.State == WebhookMessageState.Succeeded) { if (message.State == WebhookMessageState.Failed || message.State == WebhookMessageState.Succeeded) {
log.Error($"webhook message already handled. {message.WebhookId}:{message.EventId}"); _logTracer.Error($"webhook message already handled. {message.WebhookId:Tag:WebhookId}:{message.EventId:Tag:EventId}");
return; return;
} }
var newMessage = message with { TryCount = message.TryCount + 1 }; var newMessage = message with { TryCount = message.TryCount + 1 };
log.Info($"sending webhook: {message.WebhookId}:{message.EventId}"); _logTracer.Info($"sending webhook: {message.WebhookId:Tag:WebhookId}:{message.EventId:Tag:EventId}");
var success = await Send(newMessage); var success = await Send(newMessage);
if (success) { if (success) {
newMessage = newMessage with { State = WebhookMessageState.Succeeded }; newMessage = newMessage with { State = WebhookMessageState.Succeeded };
var r = await Replace(newMessage); var r = await Replace(newMessage);
if (!r.IsOk) { if (!r.IsOk) {
log.WithHttpStatus(r.ErrorV).Error($"failed to replace webhook message with EventId {newMessage.EventId} with Succeeded"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace webhook message with {newMessage.WebhookId:Tag:WebhookId}:{newMessage.EventId:Tag:EventId} with Succeeded");
} }
log.Info($"sent webhook event {newMessage.WebhookId}:{newMessage.EventId}"); _logTracer.Info($"sent webhook event {newMessage.WebhookId}:{newMessage.EventId}");
} else if (newMessage.TryCount < MAX_TRIES) { } else if (newMessage.TryCount < MAX_TRIES) {
newMessage = newMessage with { State = WebhookMessageState.Retrying }; newMessage = newMessage with { State = WebhookMessageState.Retrying };
var r = await Replace(newMessage); var r = await Replace(newMessage);
if (!r.IsOk) { if (!r.IsOk) {
log.WithHttpStatus(r.ErrorV).Error($"failed to replace webhook message with EventId {newMessage.EventId} with Retrying"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace webhook message with {newMessage.WebhookId:Tag:WebhookId}:{newMessage.EventId:Tag:EventId} with Retrying");
} }
await QueueWebhook(newMessage); await QueueWebhook(newMessage);
log.Warning($"sending webhook event failed, re-queued {newMessage.WebhookId}:{newMessage.EventId}"); _logTracer.Warning($"sending webhook event failed, re-queued {newMessage.WebhookId:Tag:WebhookId}:{newMessage.EventId:Tag:EventId}");
} else { } else {
newMessage = newMessage with { State = WebhookMessageState.Failed }; newMessage = newMessage with { State = WebhookMessageState.Failed };
var r = await Replace(newMessage); var r = await Replace(newMessage);
if (!r.IsOk) { if (!r.IsOk) {
log.WithHttpStatus(r.ErrorV).Error($"failed to replace webhook message with EventId {newMessage.EventId} with Failed"); _logTracer.WithHttpStatus(r.ErrorV).Error($"failed to replace webhook message with EventId {newMessage.WebhookId:Tag:WebhookId}:{newMessage.EventId:Tag:EventId} with Failed");
} }
log.Info($"sending webhook: {newMessage.WebhookId} event: {newMessage.EventId} failed {newMessage.TryCount} times."); _logTracer.Info($"sending webhook: {newMessage.WebhookId:Tag:WebhookId} event: {newMessage.EventId:Tag:EventId} failed {newMessage.TryCount:Tag:TryCount} times.");
} }
} }
@ -223,7 +206,7 @@ public class WebhookMessageLogOperations : Orm<WebhookMessageLog>, IWebhookMessa
var log = _logTracer.WithTag("WebhookId", message.WebhookId.ToString()); var log = _logTracer.WithTag("WebhookId", message.WebhookId.ToString());
var webhook = await _context.WebhookOperations.GetByWebhookId(message.WebhookId); var webhook = await _context.WebhookOperations.GetByWebhookId(message.WebhookId);
if (webhook == null) { if (webhook == null) {
log.Error($"webhook not found for webhookId: {message.WebhookId}"); log.Error($"webhook not found for webhookId: {message.WebhookId:Tag:WebhookId}");
return false; return false;
} }
try { try {


@ -17,7 +17,7 @@ public class Ado : NotificationsBase, IAdo {
public async Async.Task NotifyAdo(AdoTemplate config, Container container, string filename, IReport reportable, bool failTaskOnTransientError) { public async Async.Task NotifyAdo(AdoTemplate config, Container container, string filename, IReport reportable, bool failTaskOnTransientError) {
if (reportable is RegressionReport) { if (reportable is RegressionReport) {
_logTracer.Info($"ado integration does not support regression report. container:{container} filename:{filename}"); _logTracer.Info($"ado integration does not support regression report. container:{container:Tag:Container} filename:{filename:Tag:Filename}");
return; return;
} }
@ -26,7 +26,7 @@ public class Ado : NotificationsBase, IAdo {
var notificationInfo = @$"job_id:{report.JobId} task_id:{report.TaskId} var notificationInfo = @$"job_id:{report.JobId} task_id:{report.TaskId}
container:{container} filename:{filename}"; container:{container} filename:{filename}";
_logTracer.Info($"notify ado: {notificationInfo}"); _logTracer.Info($"notify ado: {report.JobId:Tag:JobId} {report.TaskId:Tag:TaskId} {container:Tag:Container} {filename:Tag:Filename}");
try { try {
var ado = await AdoConnector.AdoConnectorCreator(_context, container, filename, config, report, _logTracer); var ado = await AdoConnector.AdoConnectorCreator(_context, container, filename, config, report, _logTracer);
@ -41,7 +41,7 @@ container:{container} filename:{filename}";
ValueError, ValueError,
*/ */
if (!failTaskOnTransientError && IsTransient(e)) { if (!failTaskOnTransientError && IsTransient(e)) {
_logTracer.Error($"transient ADO notification failure {notificationInfo}"); _logTracer.Error($"transient ADO notification failure {report.JobId:Tag:JobId} {report.TaskId:Tag:TaskId} {container:Tag:Container} {filename:Tag:Filename}");
throw; throw;
} else { } else {
await FailTask(report, e); await FailTask(report, e);
@ -209,9 +209,9 @@ container:{container} filename:{filename}";
if (document.Any()) { if (document.Any()) {
_ = await _client.UpdateWorkItemAsync(document, _project, (int)(item.Id!)); _ = await _client.UpdateWorkItemAsync(document, _project, (int)(item.Id!));
_logTracer.Info($"notify ado: updated work item {item.Id} - {notificationInfo}"); _logTracer.Info($"notify ado: updated {item.Id:Tag:WorkItemId} - {notificationInfo}");
} else { } else {
_logTracer.Info($"notify ado: no update for work item {item.Id} - {notificationInfo}"); _logTracer.Info($"notify ado: no update {item.Id:Tag:WorkItemId} - {notificationInfo}");
} }
} }
@ -274,7 +274,7 @@ container:{container} filename:{filename}";
if (!seen) { if (!seen) {
var entry = await CreateNew(); var entry = await CreateNew();
_logTracer.Info($"notify ado: created new work item {entry.Id} - {notificationInfo}"); _logTracer.Info($"notify ado: created new work item {entry.Id:Tag:EntryId} - {notificationInfo}");
} }
} }
} }


@ -17,7 +17,7 @@ public class GithubIssues : NotificationsBase, IGithubIssues {
} }
if (reportable is RegressionReport) { if (reportable is RegressionReport) {
_logTracer.Info($"github issue integration does not support regression reports. container:{container} filename:{filename}"); _logTracer.Info($"github issue integration does not support regression reports. {container:Tag:Container} - {filename:Tag:Filename}");
return; return;
} }


@ -14,7 +14,7 @@ public abstract class NotificationsBase {
} }
public async Async.Task FailTask(Report report, Exception error) { public async Async.Task FailTask(Report report, Exception error) {
_logTracer.Error($"notification failed: job_id:{report.JobId} task_id:{report.TaskId} err:{error}"); _logTracer.Exception(error, $"notification failed {report.JobId:Tag:JobId} {report.TaskId:Tag:TaskId} {error.Message:Tag:Error}");
var task = await _context.TaskOperations.GetByJobIdAndTaskId(report.JobId, report.TaskId); var task = await _context.TaskOperations.GetByJobIdAndTaskId(report.JobId, report.TaskId);
if (task != null) { if (task != null) {
@ -54,10 +54,10 @@ public abstract class NotificationsBase {
Uri? reportUrl = null) { Uri? reportUrl = null) {
task ??= await context.TaskOperations.GetByJobIdAndTaskId(report.JobId, report.TaskId); task ??= await context.TaskOperations.GetByJobIdAndTaskId(report.JobId, report.TaskId);
var checkedTask = task.EnsureNotNull($"invalid task {report.TaskId}"); var checkedTask = task.EnsureNotNull($"invalid task {report.TaskId:Tag:TaskId}");
job ??= await context.JobOperations.Get(report.JobId); job ??= await context.JobOperations.Get(report.JobId);
var checkedJob = job.EnsureNotNull($"invalid job {report.JobId}"); var checkedJob = job.EnsureNotNull($"invalid job {report.JobId:Tag:JobId}");
if (targetUrl == null) { if (targetUrl == null) {
var setupContainer = Scheduler.GetSetupContainer(checkedTask.Config); var setupContainer = Scheduler.GetSetupContainer(checkedTask.Config);


@ -49,7 +49,7 @@ public class Teams : ITeams {
var client = new Request(_httpFactory.CreateClient()); var client = new Request(_httpFactory.CreateClient());
var response = await client.Post(url: new Uri(configUrl!), JsonSerializer.Serialize(message)); var response = await client.Post(url: new Uri(configUrl!), JsonSerializer.Serialize(message));
if (response == null || !response.IsSuccessStatusCode) { if (response == null || !response.IsSuccessStatusCode) {
_logTracer.Error($"webhook failed {response?.StatusCode} {response?.Content}"); _logTracer.Error($"webhook failed {response?.StatusCode:Tag:StatusCode} {response?.Content}");
} }
} }
@ -61,7 +61,7 @@ public class Teams : ITeams {
if (reportOrRegression is Report report) { if (reportOrRegression is Report report) {
var task = await _context.TaskOperations.GetByJobIdAndTaskId(report.JobId, report.TaskId); var task = await _context.TaskOperations.GetByJobIdAndTaskId(report.JobId, report.TaskId);
if (task == null) { if (task == null) {
_logTracer.Error($"report with invalid task {report.JobId}:{report.TaskId}"); _logTracer.Error($"report with invalid task {report.JobId:Tag:JobId}:{report.TaskId:Tag:TaskId}");
return; return;
} }


@ -211,7 +211,7 @@ namespace ApiService.OneFuzzLib.Orm {
if (func != null) { if (func != null) {
var partitionKey = _partitionKeyGetter?.Invoke(entity); var partitionKey = _partitionKeyGetter?.Invoke(entity);
var rowKey = _rowKeyGetter?.Invoke(entity); var rowKey = _rowKeyGetter?.Invoke(entity);
_logTracer.Info($"processing state update: {typeof(T)} - PartitionKey: {partitionKey} RowKey: {rowKey} - {state}"); _logTracer.Info($"processing state update: {typeof(T):Tag:Type} - {partitionKey:Tag:PartitionKey} {rowKey:Tag:RowKey} - {state:Tag:State}");
return await func(entity); return await func(entity);
} else { } else {
throw new ArgumentException($"State function for state: '{state}' not found on type {typeof(T)}"); throw new ArgumentException($"State function for state: '{state}' not found on type {typeof(T)}");


@ -6,7 +6,8 @@
"Microsoft.Azure.Functions.Worker.IFunctionsWorkerApplicationBuilder", "Microsoft.Azure.Functions.Worker.IFunctionsWorkerApplicationBuilder",
"Microsoft.Extensions.DependencyInjection.IServiceCollection", "Microsoft.Extensions.DependencyInjection.IServiceCollection",
"Moq.Language.Flow.IReturnsResult", "Moq.Language.Flow.IReturnsResult",
"FluentAssertions.AndConstraint" "FluentAssertions.AndConstraint",
"System.Text.StringBuilder"
] ]
} }
} }


@ -14,20 +14,20 @@ sealed class TestLogTracer : ILogTracer {
private readonly Dictionary<string, string> _tags = new(); private readonly Dictionary<string, string> _tags = new();
public IReadOnlyDictionary<string, string> Tags => _tags; public IReadOnlyDictionary<string, string> Tags => _tags;
public void Critical(string message) { public void Critical(LogStringHandler message) {
_output.WriteLine($"[Critical] {message}"); _output.WriteLine($"[Critical] {message.ToString()}");
} }
public void Error(string message) { public void Error(LogStringHandler message) {
_output.WriteLine($"[Error] {message}"); _output.WriteLine($"[Error] {message.ToString()}");
} }
public void Event(string evt, IReadOnlyDictionary<string, double>? metrics) { public void Event(LogStringHandler evt, IReadOnlyDictionary<string, double>? metrics) {
// TODO: metrics // TODO: metrics
_output.WriteLine($"[Event] [{evt}]"); _output.WriteLine($"[Event] [{evt}]");
} }
public void Exception(Exception ex, string message = "", IReadOnlyDictionary<string, double>? metrics = null) { public void Exception(Exception ex, LogStringHandler message = $"", IReadOnlyDictionary<string, double>? metrics = null) {
// TODO: metrics // TODO: metrics
_output.WriteLine($"[Error] {message} {ex}"); _output.WriteLine($"[Error] {message} {ex}");
} }
@ -36,16 +36,16 @@ sealed class TestLogTracer : ILogTracer {
// nothing to do // nothing to do
} }
public void Info(string message) { public void Info(LogStringHandler message) {
_output.WriteLine($"[Info] {message}"); _output.WriteLine($"[Info] {message.ToString()}");
} }
public void Verbose(string message) { public void Verbose(LogStringHandler message) {
_output.WriteLine($"[Verbose] {message}"); _output.WriteLine($"[Verbose] {message.ToString()}");
} }
public void Warning(string message) { public void Warning(LogStringHandler message) {
_output.WriteLine($"[Warning] {message}"); _output.WriteLine($"[Warning] {message.ToString()}");
} }
public ILogTracer WithHttpStatus((int, string) status) { public ILogTracer WithHttpStatus((int, string) status) {
@ -61,6 +61,6 @@ sealed class TestLogTracer : ILogTracer {
} }
public void Error(Error error) { public void Error(Error error) {
Error(error.ToString()); Error($"{error}");
} }
} }
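
A note on the interpolated string handler the signatures above switch to: with C# 10, a parameter typed as a custom handler makes the compiler break an interpolated argument into its literal parts and holes and feed them to the handler one by one, so a hole written as {value:Tag:Name} can be captured as a named tag instead of only being flattened into the message text. The sketch below is a minimal illustration under stated assumptions — the name LogStringHandler matches the signatures above, but the StringBuilder-backed fields, the Tags property, and the exact behavior are illustrative, not the repository's actual implementation.

    using System;
    using System.Collections.Generic;
    using System.Runtime.CompilerServices;
    using System.Text;

    // Minimal sketch of a handler that understands the "{value:Tag:Name}"
    // convention used throughout this commit. Illustrative only.
    [InterpolatedStringHandler]
    public ref struct LogStringHandler {
        private readonly StringBuilder _builder;
        private Dictionary<string, string>? _tags;

        // The compiler calls this with the total literal length and the
        // number of interpolation holes in the string.
        public LogStringHandler(int literalLength, int formattedCount) {
            _builder = new StringBuilder(literalLength);
            _tags = null;
        }

        // Literal text between holes is appended to the message as-is.
        public void AppendLiteral(string value) => _builder.Append(value);

        // Each "{expr:format}" hole arrives here; a format of "Tag:Name"
        // records the value under Name so it can later be emitted as a
        // custom dimension, while the message text stays readable.
        public void AppendFormatted<T>(T value, string? format = null) {
            if (format is not null && format.StartsWith("Tag:", StringComparison.Ordinal)) {
                _tags ??= new Dictionary<string, string>();
                _tags[format["Tag:".Length..]] = value?.ToString() ?? string.Empty;
            }
            _builder.Append(value);
        }

        public IReadOnlyDictionary<string, string>? Tags => _tags;

        public override string ToString() => _builder.ToString();
    }

With a handler like this in the logger's signature, a call such as _log.Info($"deleting vm {name:Tag:VmName}") is lowered by the compiler into handler construction plus AppendLiteral/AppendFormatted calls, so VmName travels as structured data alongside the rendered message.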


@ -86,10 +86,10 @@ public abstract class FunctionTestBase : IAsyncLifetime {
.Select(async container => { .Select(async container => {
try { try {
using var _ = await blobClient.DeleteBlobContainerAsync(container.Name); using var _ = await blobClient.DeleteBlobContainerAsync(container.Name);
Logger.Info($"cleaned up container {container.Name}"); Logger.Info($"cleaned up container {container.Name:Tag:ContainerName}");
} catch (Exception ex) { } catch (Exception ex) {
// swallow any exceptions: this is a best-effort attempt to cleanup // swallow any exceptions: this is a best-effort attempt to cleanup
Logger.Exception(ex, "error deleting container at end of test"); Logger.Exception(ex, $"error deleting container {container.Name:Tag:ContainerName} at end of test");
} }
}) })
.ToListAsync()); .ToListAsync());
@ -101,10 +101,10 @@ public abstract class FunctionTestBase : IAsyncLifetime {
.Select(async table => { .Select(async table => {
try { try {
using var _ = await tableClient.DeleteTableAsync(table.Name); using var _ = await tableClient.DeleteTableAsync(table.Name);
Logger.Info($"cleaned up table {table.Name}"); Logger.Info($"cleaned up table {table.Name:Tag:TableName}");
} catch (Exception ex) { } catch (Exception ex) {
// swallow any exceptions: this is a best-effort attempt to cleanup // swallow any exceptions: this is a best-effort attempt to cleanup
Logger.Exception(ex, "error deleting table at end of test"); Logger.Exception(ex, $"error deleting table {table.Name:Tag:TableName} at end of test");
} }
}) })
.ToListAsync()); .ToListAsync());
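
To show where the captured tags could end up as custom dimensions, here is a hedged sketch of a logger method that accepts the handler and forwards its Tags to Application Insights trace telemetry. TrackTrace, TraceTelemetry, and Properties are the standard Application Insights API surface; the SketchLogTracer class, its _telemetryClient field, and the handler's Tags property are assumptions carried over from the earlier sketch, not the repository's exact implementation.

    using Microsoft.ApplicationInsights;
    using Microsoft.ApplicationInsights.DataContracts;

    public sealed class SketchLogTracer {
        private readonly TelemetryClient _telemetryClient;

        public SketchLogTracer(TelemetryClient telemetryClient) {
            _telemetryClient = telemetryClient;
        }

        // By the time this runs, the compiler has already built the handler
        // from the caller's interpolated string, so the rendered message and
        // the captured tags arrive separately.
        public void Info(LogStringHandler message) {
            var telemetry = new TraceTelemetry(message.ToString(), SeverityLevel.Information);

            if (message.Tags is not null) {
                foreach (var (key, value) in message.Tags) {
                    telemetry.Properties[key] = value; // surfaces as customDimensions in the trace
                }
            }

            _telemetryClient.TrackTrace(telemetry);
        }
    }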