mirror of https://github.com/google/go-attestation.git
Implement CombineEventlogs().
PiperOrigin-RevId: 410914994
parent be496f1149
commit 0393b91867
@@ -173,7 +173,8 @@ type EventLog struct {
 	// Algs holds the set of algorithms that the event log uses.
 	Algs []HashAlg
 
-	rawEvents []rawEvent
+	rawEvents   []rawEvent
+	specIDEvent *specIDEvent
 }
 
 func (e *EventLog) clone() *EventLog {
@@ -183,6 +184,11 @@ func (e *EventLog) clone() *EventLog {
 	}
 	copy(out.Algs, e.Algs)
 	copy(out.rawEvents, e.rawEvents)
+	if e.specIDEvent != nil {
+		dupe := *e.specIDEvent
+		out.specIDEvent = &dupe
+	}
+
 	return &out
 }
 
@@ -528,6 +534,7 @@ func ParseEventLog(measurementLog []byte) (*EventLog, error) {
 		// Note that this doesn't actually guarantee that events have SHA256
 		// digests.
 		parseFn = parseRawEvent2
+		el.specIDEvent = specID
 	} else {
 		el.Algs = []HashAlg{HashSHA1}
 		el.rawEvents = append(el.rawEvents, e)
@@ -741,3 +748,73 @@ func parseRawEvent2(r *bytes.Buffer, specID *specIDEvent) (event rawEvent, err error) {
 	}
 	return event, err
 }
+
+// AppendEvents takes a series of TPM 2.0 event logs and combines
+// them into a single sequence of events with a single header.
+//
+// Additional logs must not use a digest algorithm which was not
+// present in the original log.
+func AppendEvents(base []byte, additional ...[]byte) ([]byte, error) {
+	baseLog, err := ParseEventLog(base)
+	if err != nil {
+		return nil, fmt.Errorf("base: %v", err)
+	}
+	if baseLog.specIDEvent == nil {
+		return nil, errors.New("tpm 1.2 event logs cannot be combined")
+	}
+
+	outBuff := make([]byte, len(base))
+	copy(outBuff, base)
+	out := bytes.NewBuffer(outBuff)
+
+	for i, l := range additional {
+		log, err := ParseEventLog(l)
+		if err != nil {
+			return nil, fmt.Errorf("log %d: %v", i, err)
+		}
+		if log.specIDEvent == nil {
+			return nil, fmt.Errorf("log %d: cannot use tpm 1.2 event log as a source", i)
+		}
+
+	algCheck:
+		for _, alg := range log.specIDEvent.algs {
+			for _, baseAlg := range baseLog.specIDEvent.algs {
+				if baseAlg == alg {
+					continue algCheck
+				}
+			}
+			return nil, fmt.Errorf("log %d: cannot use digest (%+v) not present in base log", i, alg)
+		}
+
+		for x, e := range log.rawEvents {
+			// Serialize header (PCR index, event type, number of digests)
+			binary.Write(out, binary.LittleEndian, rawEvent2Header{
+				PCRIndex: uint32(e.index),
+				Type:     uint32(e.typ),
+			})
+			binary.Write(out, binary.LittleEndian, uint32(len(e.digests)))
+
+			// Serialize digests
+			for _, d := range e.digests {
+				var algID uint16
+				switch d.hash {
+				case crypto.SHA256:
+					algID = uint16(HashSHA256)
+				case crypto.SHA1:
+					algID = uint16(HashSHA1)
+				default:
+					return nil, fmt.Errorf("log %d: event %d: unhandled hash function %v", i, x, d.hash)
+				}
+
+				binary.Write(out, binary.LittleEndian, algID)
+				out.Write(d.data)
+			}
+
+			// Serialize event data
+			binary.Write(out, binary.LittleEndian, uint32(len(e.data)))
+			out.Write(e.data)
+		}
+	}
+
+	return out.Bytes(), nil
+}
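For orientation, here is a minimal usage sketch of the new API. It is not part of this commit: the file paths and the surrounding program are illustrative assumptions only. It combines a base firmware log with one additional TPM 2.0 log and re-parses the result.

package main

import (
	"fmt"
	"io/ioutil"
	"log"

	"github.com/google/go-attestation/attest"
)

func main() {
	// Hypothetical inputs: a base log as exported by the kernel and an extra
	// TPM 2.0 event log produced elsewhere (both paths are illustrative).
	base, err := ioutil.ReadFile("/sys/kernel/security/tpm0/binary_bios_measurements")
	if err != nil {
		log.Fatalf("reading base log: %v", err)
	}
	extra, err := ioutil.ReadFile("extra_eventlog.bin")
	if err != nil {
		log.Fatalf("reading extra log: %v", err)
	}

	// AppendEvents keeps the base log's header and appends the extra log's
	// events; it fails if the extra log is TPM 1.2 or uses a digest algorithm
	// the base log does not.
	combined, err := attest.AppendEvents(base, extra)
	if err != nil {
		log.Fatalf("AppendEvents: %v", err)
	}

	parsed, err := attest.ParseEventLog(combined)
	if err != nil {
		log.Fatalf("ParseEventLog: %v", err)
	}
	fmt.Printf("combined log uses algorithms %v\n", parsed.Algs)
}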
@@ -16,6 +16,7 @@ package attest
 
 import (
 	"bytes"
+	"encoding/base64"
 	"encoding/json"
 	"io/ioutil"
 	"testing"
@@ -337,3 +338,45 @@ func TestEBSVerifyWorkaround(t *testing.T) {
 		t.Errorf("Verify() failed: %v", err)
 	}
 }
+
+func TestAppendEvents(t *testing.T) {
+	base, err := ioutil.ReadFile("testdata/ubuntu_2104_shielded_vm_no_secure_boot_eventlog")
+	if err != nil {
+		t.Fatalf("reading test data: %v", err)
+	}
+
+	extraLog, err := base64.StdEncoding.DecodeString(`AAAAAAMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUAAABTcGVjIElEIEV2ZW50MDMAAAAAAAACAAEC
+AAAABAAUAAsAIAAACAAAAAYAAAACAAAABACX3UqVWDMNeg2Hkxyy6Q35wO4yBwsAVXbW4fKD8+xm
+Kv75L4ecBpvSR4d6bz+A7z1prUcKPuMrAQAACAISpgJpbWFfaGFzaD1zaGEyNTYgYXBwYXJtb3I9
+MSBwY2k9bm9hZXIsbm9hdHMgcHJpbnRrLmRldmttc2c9b24gc2xhYl9ub21lcmdlIGNvbnNvbGU9
+dHR5UzAsMTE1MjAwbjggY29uc29sZT10dHkwIGdsaW51eC1ib290LWltYWdlPTIwMjExMDI3LjAy
+LjAzIHF1aWV0IHNwbGFzaCBwbHltb3V0aC5pZ25vcmUtc2VyaWFsLWNvbnNvbGVzIGxzbT1sb2Nr
+ZG93bix5YW1hLGxvYWRwaW4sc2FmZXNldGlkLGludGVncml0eSxhcHBhcm1vcixzZWxpbnV4LHNt
+YWNrLHRvbW95byxicGYgcGFuaWM9MzAgaTkxNS5lbmFibGVfcHNyPTA=`)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	combined, err := AppendEvents(base, extraLog)
+	if err != nil {
+		t.Fatalf("AppendEvents() failed: %v", err)
+	}
+
+	// Make sure the combined log parses successfully and has one more
+	// event than the base log.
+	parsedBase, err := ParseEventLog(base)
+	if err != nil {
+		t.Fatal(err)
+	}
+	parsed, err := ParseEventLog(combined)
+	if err != nil {
+		t.Fatalf("ParseEventLog(combined_log) failed: %v", err)
+	}
+
+	if got, want := len(parsed.rawEvents), len(parsedBase.rawEvents)+1; got != want {
+		t.Errorf("unexpected number of events in combined log: got %d, want %d", got, want)
+		for i, e := range parsed.rawEvents {
+			t.Logf("logs[%d] = %+v", i, e)
+		}
+	}
+}
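A possible follow-on check, sketched below and not part of this commit: a combined log can be replayed against PCR values with the package's existing EventLog.Verify method. The ReplayCombined helper, its package name, and the assumption that the caller already holds the PCR values (read from the TPM elsewhere) are illustrative only.

package attestutil // hypothetical helper package, for illustration only

import (
	"fmt"

	"github.com/google/go-attestation/attest"
)

// ReplayCombined parses a combined event log and replays it against the
// supplied PCR values, returning the events whose digests verify.
func ReplayCombined(combined []byte, pcrs []attest.PCR) ([]attest.Event, error) {
	el, err := attest.ParseEventLog(combined)
	if err != nil {
		return nil, fmt.Errorf("parse combined log: %v", err)
	}
	events, err := el.Verify(pcrs)
	if err != nil {
		return nil, fmt.Errorf("replaying combined log: %v", err)
	}
	return events, nil
}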