mirror of
https://github.com/nsacyber/HIRS.git
synced 2025-02-20 17:52:47 +00:00
commit
e598b78518
@ -11,3 +11,5 @@ HIRS_ACA_PORTAL_CONTAINER_PORT=80
|
||||
HIRS_ACA_HOSTNAME=hirsaca
|
||||
|
||||
HIRS_SUBNET=172.19.0.0/16
|
||||
|
||||
TEST_STATUS=0
|
@ -9,3 +9,4 @@ RUN mkdir paccor && pushd paccor && wget https://github.com/nsacyber/paccor/rele
|
||||
|
||||
# Install Software TPM for Provisioning
|
||||
RUN mkdir ibmtpm && pushd ibmtpm && wget https://downloads.sourceforge.net/project/ibmswtpm2/ibmtpm1332.tar.gz && tar -zxvf ibmtpm1332.tar.gz && cd src && make -j5 && popd
|
||||
|
||||
|
@ -12,3 +12,4 @@ RUN mkdir paccor && pushd paccor && wget https://github.com/nsacyber/paccor/rele
|
||||
|
||||
# Install Software TPM for Provisioning
|
||||
RUN mkdir tpm_emulator && pushd tpm_emulator && wget https://phoenixnap.dl.sourceforge.net/project/ibmswtpm/tpm4769tar.gz && tar -xzvf tpm4769tar.gz && pushd libtpm && ./autogen && ./configure && make && popd && pushd tpm && make -f makefile-tpm && popd && popd
|
||||
|
||||
|
@ -1,9 +1,8 @@
|
||||
version: "3.1"
|
||||
|
||||
services:
|
||||
aca:
|
||||
image: hirs/hirs-ci:aca
|
||||
container_name: hirs-aca
|
||||
container_name: hirs-aca1
|
||||
volumes:
|
||||
- ../../:/HIRS
|
||||
ports:
|
||||
@ -18,24 +17,23 @@ services:
|
||||
- ${HIRS_ACA_HOSTNAME}
|
||||
|
||||
tpmprovisioner:
|
||||
image: hirs/hirs-ci:tpmprovisioner
|
||||
container_name: hirs-aca-provisioner
|
||||
image: hirs/hirs-ci:tpm2provisioner
|
||||
container_name: hirs-provisioner1-tpm2
|
||||
depends_on:
|
||||
- aca
|
||||
volumes:
|
||||
- ../../:/HIRS
|
||||
entrypoint: /bin/bash -c
|
||||
command: [HIRS/.ci/setup/setup-tpmprovisioner.sh;
|
||||
HIRS/.ci/system-tests/systems-test-centos7-tpm1-2.sh]
|
||||
command: [tail -f /dev/null;]
|
||||
devices:
|
||||
- "/dev/mem:/dev/mem"
|
||||
cap_add:
|
||||
- sys_rawio
|
||||
networks:
|
||||
hirs_aca_system_tests:
|
||||
ipv4_address: ${HIRS_ACA_PROVISIONER_IP}
|
||||
ipv4_address: ${HIRS_ACA_PROVISIONER_TPM2_IP}
|
||||
environment:
|
||||
- HIRS_ACA_PROVISIONER_IP=${HIRS_ACA_PROVISIONER_IP}
|
||||
- HIRS_ACA_PROVISIONER_TPM2_IP=${HIRS_ACA_PROVISIONER_TPM2_IP}
|
||||
- TPM_ENABLED=${TPM_ENABLED}
|
||||
- IMA_ENABLED=${IMA_ENABLED}
|
||||
- HIRS_ACA_PORTAL_IP=${HIRS_ACA_PORTAL_IP}
|
@ -1,57 +0,0 @@
|
||||
---
|
||||
# Run YAML Lint to verify this file prior to check-in.
|
||||
|
||||
version: "3.1"
|
||||
|
||||
services:
|
||||
aca:
|
||||
image: hirs/hirs-ci:aca
|
||||
container_name: hirs-aca
|
||||
volumes:
|
||||
- ../../:/HIRS
|
||||
ports:
|
||||
- "${HIRS_ACA_PORTAL_PORT}:${HIRS_ACA_PORTAL_CONTAINER_PORT}"
|
||||
entrypoint: /bin/bash -c
|
||||
command: [HIRS/.ci/setup/setup-aca.sh]
|
||||
hostname: ${HIRS_ACA_HOSTNAME}
|
||||
networks:
|
||||
hirs_aca_system_tests:
|
||||
ipv4_address: ${HIRS_ACA_PORTAL_IP}
|
||||
aliases:
|
||||
- ${HIRS_ACA_HOSTNAME}
|
||||
|
||||
tpm2provisioner:
|
||||
image: hirs/hirs-ci:tpm2provisioner
|
||||
container_name: hirs-aca-provisioner-tpm2
|
||||
depends_on:
|
||||
- aca
|
||||
volumes:
|
||||
- ../../:/HIRS
|
||||
entrypoint: /bin/bash -c
|
||||
command: [HIRS/.ci/setup/setup-tpm2provisioner-base-delta-bad.sh;
|
||||
HIRS/.ci/system-tests/systems-test-centos7-tpm2-base-delta-bad.sh]
|
||||
devices:
|
||||
- "/dev/mem:/dev/mem"
|
||||
cap_add:
|
||||
- sys_rawio
|
||||
networks:
|
||||
hirs_aca_system_tests:
|
||||
ipv4_address: ${HIRS_ACA_PROVISIONER_TPM2_IP}
|
||||
environment:
|
||||
- HIRS_ACA_PROVISIONER_TPM2_IP=${HIRS_ACA_PROVISIONER_TPM2_IP}
|
||||
- TPM_ENABLED=${TPM_ENABLED}
|
||||
- IMA_ENABLED=${IMA_ENABLED}
|
||||
- HIRS_ACA_PORTAL_IP=${HIRS_ACA_PORTAL_IP}
|
||||
- HIRS_ACA_PORTAL_PORT=${HIRS_ACA_PORTAL_PORT}
|
||||
- HIRS_BROKER_PORT=${HIRS_BROKER_PORT}
|
||||
- HIRS_ACA_PORTAL_CONTAINER_PORT=${HIRS_ACA_PORTAL_CONTAINER_PORT}
|
||||
- HIRS_ACA_HOSTNAME=${HIRS_ACA_HOSTNAME}
|
||||
- HIRS_SUBNET=${HIRS_SUBNET}
|
||||
|
||||
networks:
|
||||
hirs_aca_system_tests:
|
||||
driver: bridge
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: ${HIRS_SUBNET}
|
@ -1,57 +0,0 @@
|
||||
---
|
||||
# Run YAML Lint to verify this file prior to check-in.
|
||||
|
||||
version: "3.1"
|
||||
|
||||
services:
|
||||
aca:
|
||||
image: hirs/hirs-ci:aca
|
||||
container_name: hirs-aca
|
||||
volumes:
|
||||
- ../../:/HIRS
|
||||
ports:
|
||||
- "${HIRS_ACA_PORTAL_PORT}:${HIRS_ACA_PORTAL_CONTAINER_PORT}"
|
||||
entrypoint: /bin/bash -c
|
||||
command: [HIRS/.ci/setup/setup-aca.sh]
|
||||
hostname: ${HIRS_ACA_HOSTNAME}
|
||||
networks:
|
||||
hirs_aca_system_tests:
|
||||
ipv4_address: ${HIRS_ACA_PORTAL_IP}
|
||||
aliases:
|
||||
- ${HIRS_ACA_HOSTNAME}
|
||||
|
||||
tpm2provisioner:
|
||||
image: hirs/hirs-ci:tpm2provisioner
|
||||
container_name: hirs-aca-provisioner-tpm2
|
||||
depends_on:
|
||||
- aca
|
||||
volumes:
|
||||
- ../../:/HIRS
|
||||
entrypoint: /bin/bash -c
|
||||
command: [HIRS/.ci/setup/setup-tpm2provisioner-base-delta-good.sh;
|
||||
HIRS/.ci/system-tests/systems-test-centos7-tpm2-base-delta-good.sh]
|
||||
devices:
|
||||
- "/dev/mem:/dev/mem"
|
||||
cap_add:
|
||||
- sys_rawio
|
||||
networks:
|
||||
hirs_aca_system_tests:
|
||||
ipv4_address: ${HIRS_ACA_PROVISIONER_TPM2_IP}
|
||||
environment:
|
||||
- HIRS_ACA_PROVISIONER_TPM2_IP=${HIRS_ACA_PROVISIONER_TPM2_IP}
|
||||
- TPM_ENABLED=${TPM_ENABLED}
|
||||
- IMA_ENABLED=${IMA_ENABLED}
|
||||
- HIRS_ACA_PORTAL_IP=${HIRS_ACA_PORTAL_IP}
|
||||
- HIRS_ACA_PORTAL_PORT=${HIRS_ACA_PORTAL_PORT}
|
||||
- HIRS_BROKER_PORT=${HIRS_BROKER_PORT}
|
||||
- HIRS_ACA_PORTAL_CONTAINER_PORT=${HIRS_ACA_PORTAL_CONTAINER_PORT}
|
||||
- HIRS_ACA_HOSTNAME=${HIRS_ACA_HOSTNAME}
|
||||
- HIRS_SUBNET=${HIRS_SUBNET}
|
||||
|
||||
networks:
|
||||
hirs_aca_system_tests:
|
||||
driver: bridge
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: ${HIRS_SUBNET}
|
@ -1,57 +0,0 @@
|
||||
---
|
||||
# Run YAML Lint to verify this file prior to check-in.
|
||||
|
||||
version: "3.1"
|
||||
|
||||
services:
|
||||
aca:
|
||||
image: hirs/hirs-ci:aca
|
||||
container_name: hirs-aca
|
||||
volumes:
|
||||
- ../../:/HIRS
|
||||
ports:
|
||||
- "${HIRS_ACA_PORTAL_PORT}:${HIRS_ACA_PORTAL_CONTAINER_PORT}"
|
||||
entrypoint: /bin/bash -c
|
||||
command: [HIRS/.ci/setup/setup-aca.sh]
|
||||
hostname: ${HIRS_ACA_HOSTNAME}
|
||||
networks:
|
||||
hirs_aca_system_tests:
|
||||
ipv4_address: ${HIRS_ACA_PORTAL_IP}
|
||||
aliases:
|
||||
- ${HIRS_ACA_HOSTNAME}
|
||||
|
||||
tpm2provisioner:
|
||||
image: hirs/hirs-ci:tpm2provisioner
|
||||
container_name: hirs-aca-provisioner-tpm2
|
||||
depends_on:
|
||||
- aca
|
||||
volumes:
|
||||
- ../../:/HIRS
|
||||
entrypoint: /bin/bash -c
|
||||
command: [HIRS/.ci/setup/setup-tpm2provisioner.sh;
|
||||
HIRS/.ci/system-tests/systems-test-centos7-tpm2.sh]
|
||||
devices:
|
||||
- "/dev/mem:/dev/mem"
|
||||
cap_add:
|
||||
- sys_rawio
|
||||
networks:
|
||||
hirs_aca_system_tests:
|
||||
ipv4_address: ${HIRS_ACA_PROVISIONER_TPM2_IP}
|
||||
environment:
|
||||
- HIRS_ACA_PROVISIONER_TPM2_IP=${HIRS_ACA_PROVISIONER_TPM2_IP}
|
||||
- TPM_ENABLED=${TPM_ENABLED}
|
||||
- IMA_ENABLED=${IMA_ENABLED}
|
||||
- HIRS_ACA_PORTAL_IP=${HIRS_ACA_PORTAL_IP}
|
||||
- HIRS_ACA_PORTAL_PORT=${HIRS_ACA_PORTAL_PORT}
|
||||
- HIRS_BROKER_PORT=${HIRS_BROKER_PORT}
|
||||
- HIRS_ACA_PORTAL_CONTAINER_PORT=${HIRS_ACA_PORTAL_CONTAINER_PORT}
|
||||
- HIRS_ACA_HOSTNAME=${HIRS_ACA_HOSTNAME}
|
||||
- HIRS_SUBNET=${HIRS_SUBNET}
|
||||
|
||||
networks:
|
||||
hirs_aca_system_tests:
|
||||
driver: bridge
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: ${HIRS_SUBNET}
|
@ -1,39 +0,0 @@
|
||||
# Add faulty components to the PACCOR generated JSON componentsFile.
|
||||
# This will be used to create a bad platform certificate.
|
||||
|
||||
from __future__ import print_function
|
||||
import json
|
||||
import pprint
|
||||
|
||||
try:
|
||||
badComponent = '00030003'
|
||||
pcDir = '/var/hirs/pc_generation/'
|
||||
paccorComponentsFile = 'componentsFile'
|
||||
pBaseJsonFileOut = 'PBaseCertB.componentlist.json'
|
||||
|
||||
# Open the paccor components file
|
||||
with open(pcDir + paccorComponentsFile, "r") as f:
|
||||
|
||||
# Load the info from the componentsFile
|
||||
data = json.load(f)
|
||||
print("The %s info:" % (paccorComponentsFile))
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(data)
|
||||
|
||||
# Find the component to use as "FAULTY"
|
||||
for component in data['COMPONENTS']:
|
||||
if component['COMPONENTCLASS']['COMPONENTCLASSVALUE'] == badComponent:
|
||||
print("Creating FAULTY component for: " + component['MODEL'])
|
||||
component['MODEL'] += "-FAULTY"
|
||||
print("New JSON value: " + component['MODEL'])
|
||||
break
|
||||
|
||||
# Write the new JSON file to be used in creating the PBaseCertB certificate.
|
||||
with open(pcDir + pBaseJsonFileOut, 'w') as outfile:
|
||||
print("Writing %s%s ..." % (pcDir, pBaseJsonFileOut))
|
||||
json.dump(data, outfile)
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(data)
|
||||
|
||||
except Exception as ex:
|
||||
print("=== ERROR generating PBaseCertB JSON files: %s" % (ex.message))
|
@ -1,194 +0,0 @@
|
||||
# Create JSON files needed to create the following certificates:
|
||||
# PBaseCertA - Good Base
|
||||
# SIDeltaCertA1 - Good Delta
|
||||
# SIDeltaCertA2 - Bad Delta
|
||||
# SIDeltaCertA2Resolved - Good Delta
|
||||
# SIDeltaCertA3 - Good Delta
|
||||
# VARDeltaCertA1 - Good Delta
|
||||
# VARDeltaCertA2 - Bad Delta
|
||||
# VARDeltaCertA2Resolved - Good Delta
|
||||
|
||||
from __future__ import print_function
|
||||
from builtins import range
|
||||
import copy
|
||||
import json
|
||||
import pprint
|
||||
import sys
|
||||
|
||||
try:
|
||||
minNumOfComponents = 3
|
||||
maxComponentsToFind = 2
|
||||
numComponentsFound = 0
|
||||
delComponent1AtIndex = 0
|
||||
delComponent2AtIndex = 0
|
||||
badComponent = '00030003'
|
||||
pcDir = '/var/hirs/pc_generation/'
|
||||
paccorComponentsFile = 'componentsFile'
|
||||
pBaseJsonFileOut = 'PBaseCertA.componentlist.json'
|
||||
siDeltaA1JsonFileOut = 'SIDeltaCertA1.componentlist.json'
|
||||
siDeltaA2JsonFileOut = 'SIDeltaCertA2.componentlist.json'
|
||||
siDeltaA2ResolvedJsonFileOut = 'SIDeltaCertA2.resolved.componentlist.json'
|
||||
siDeltaA3JsonFileOut = 'SIDeltaCertA3.componentlist.json'
|
||||
varDeltaA1JsonFileOut = 'VARDeltaCertA1.componentlist.json'
|
||||
varDeltaA2JsonFileOut = 'VARDeltaCertA2.componentlist.json'
|
||||
varDeltaA2ResolvedJsonFileOut = 'VARDeltaCertA2.resolved.componentlist.json'
|
||||
|
||||
# Open the paccor components file
|
||||
with open(pcDir + paccorComponentsFile, "r") as f:
|
||||
|
||||
# Load the info from the componentsFile
|
||||
data = json.load(f)
|
||||
print("The %s info:" % (paccorComponentsFile))
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(data)
|
||||
|
||||
# Initialize the base/delta structures
|
||||
pBaseComponentDict = copy.deepcopy(data)
|
||||
siDeltaA1ComponentDict = copy.deepcopy(data)
|
||||
siDeltaA2ComponentDict = copy.deepcopy(data)
|
||||
siDeltaA2ResolvedComponentDict = copy.deepcopy(data)
|
||||
siDeltaA3ComponentDict = copy.deepcopy(data)
|
||||
varDeltaA1ComponentDict = copy.deepcopy(data)
|
||||
numOfComponents = len(data['COMPONENTS'])
|
||||
|
||||
print("Total number of components: %d." % numOfComponents)
|
||||
|
||||
# Need at least three components to run system tests
|
||||
if numOfComponents < minNumOfComponents:
|
||||
raise Exception("Need at least %d components to run system tests!" % minNumOfComponents)
|
||||
else:
|
||||
print("Splitting into 1 base and multiple delta JSON files to generate the certs...")
|
||||
|
||||
# Setup good base. Find the first two components that have a Serial included.
|
||||
for i in range(len(pBaseComponentDict['COMPONENTS'])):
|
||||
print("Current component[%d]:" % i)
|
||||
pp.pprint(pBaseComponentDict['COMPONENTS'][i])
|
||||
if 'SERIAL' in pBaseComponentDict['COMPONENTS'][i]:
|
||||
print("SERIAL found: %s" % pBaseComponentDict['COMPONENTS'][i]['SERIAL'])
|
||||
numComponentsFound += 1
|
||||
else:
|
||||
print("SERIAL not found.")
|
||||
|
||||
tmpComponent = copy.deepcopy(pBaseComponentDict['COMPONENTS'][i])
|
||||
|
||||
# Check if we found 2 components
|
||||
if numComponentsFound == 1:
|
||||
delComponent1AtIndex = i
|
||||
|
||||
# Use component for the SIDeltaA1
|
||||
del siDeltaA1ComponentDict['COMPONENTS'][:]
|
||||
siDeltaA1ComponentDict['COMPONENTS'].append(tmpComponent)
|
||||
siDeltaA1ComponentDict['COMPONENTS'][0]['STATUS'] = "ADDED"
|
||||
|
||||
elif numComponentsFound == 2:
|
||||
delComponent2AtIndex = i
|
||||
|
||||
# Use component for the VARDeltaA1
|
||||
del varDeltaA1ComponentDict['COMPONENTS'][:]
|
||||
varDeltaA1ComponentDict['COMPONENTS'].append(tmpComponent)
|
||||
varDeltaA1ComponentDict['COMPONENTS'][0]['STATUS'] = "ADDED"
|
||||
break
|
||||
|
||||
# Raise exception if we don't have two components with serial numbers.
|
||||
if numComponentsFound < 2:
|
||||
raise Exception("Need at least 2 components with SERIAL NUMBERS to run system tests!")
|
||||
else:
|
||||
print ("Found at least 2 components with SERIAL NUMBERS...running system tests!!")
|
||||
|
||||
# Delete the two components from pBaseComponentDict
|
||||
del pBaseComponentDict['COMPONENTS'][delComponent2AtIndex]
|
||||
del pBaseComponentDict['COMPONENTS'][delComponent1AtIndex]
|
||||
|
||||
# Setup bad and good delta...
|
||||
# Create SIDeltaA2 with one component, MODEL as "-FAULTY", STATUS as "MODIFIED"
|
||||
# Create SIDeltaA2_resolved with one component, MODEL as "-FAULTY", STATUS as "REMOVED"
|
||||
del siDeltaA2ComponentDict['COMPONENTS'][:]
|
||||
del siDeltaA2ResolvedComponentDict['COMPONENTS'][:]
|
||||
for component in data['COMPONENTS']:
|
||||
if component['COMPONENTCLASS']['COMPONENTCLASSVALUE'] == badComponent:
|
||||
siDeltaA2Component = copy.copy(component)
|
||||
siDeltaA2Component['STATUS'] = "MODIFIED"
|
||||
siDeltaA2Component['MODEL'] += "-FAULTY"
|
||||
siDeltaA2ComponentDict['COMPONENTS'].append(siDeltaA2Component)
|
||||
|
||||
siDeltaA2ResolvedComponent = copy.copy(siDeltaA2Component)
|
||||
siDeltaA2ResolvedComponent['STATUS'] = "REMOVED"
|
||||
siDeltaA2ResolvedComponentDict['COMPONENTS'].append(siDeltaA2ResolvedComponent)
|
||||
break
|
||||
|
||||
# Setup good delta...
|
||||
# Create SIDeltaA3 with component "REMOVED" from SIDeltaA1
|
||||
del siDeltaA3ComponentDict['COMPONENTS'][:]
|
||||
siDeltaA3ComponentDict['COMPONENTS']= copy.deepcopy(siDeltaA1ComponentDict['COMPONENTS'])
|
||||
siDeltaA3ComponentDict['COMPONENTS'][0]['STATUS'] = "REMOVED"
|
||||
|
||||
# Setup bad delta...
|
||||
# Create VARDeltaA2 with a component that is not in the Base
|
||||
varDeltaA2ComponentDict = copy.deepcopy(varDeltaA1ComponentDict)
|
||||
varDeltaA2ComponentDict['COMPONENTS'][0]['MODEL'] = "This component is not in Base"
|
||||
varDeltaA2ComponentDict['COMPONENTS'][0]['SERIAL'] = "1234567"
|
||||
varDeltaA2ComponentDict['COMPONENTS'][0]['STATUS'] = "ADDED"
|
||||
|
||||
# Setup good delta...
|
||||
# Create VARDeltaA2_resolved
|
||||
varDeltaA2ResolvedComponentDict = copy.deepcopy(varDeltaA2ComponentDict)
|
||||
varDeltaA2ResolvedComponentDict['COMPONENTS'][0]['STATUS'] = "REMOVED"
|
||||
|
||||
# Write the new JSON file to be used in creating the PBaseCertA certificate.
|
||||
with open(pcDir + pBaseJsonFileOut, 'w') as outfile:
|
||||
print("Writing %s%s ..." % (pcDir, pBaseJsonFileOut))
|
||||
json.dump(pBaseComponentDict, outfile)
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(pBaseComponentDict)
|
||||
|
||||
# Write the new JSON file to be used in creating the SIDeltaA1 certificate.
|
||||
with open(pcDir + siDeltaA1JsonFileOut, 'w') as outfile:
|
||||
print("Writing %s%s ..." % (pcDir, siDeltaA1JsonFileOut))
|
||||
json.dump(siDeltaA1ComponentDict, outfile)
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(siDeltaA1ComponentDict)
|
||||
|
||||
# Write the new JSON file to be used in creating the SIDeltaA2 certificate.
|
||||
with open(pcDir + siDeltaA2JsonFileOut, 'w') as outfile:
|
||||
print("Writing %s%s ..." % (pcDir, siDeltaA2JsonFileOut))
|
||||
json.dump(siDeltaA2ComponentDict, outfile)
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(siDeltaA2ComponentDict)
|
||||
|
||||
# Write the new JSON file to be used in creating the SIDeltaA2Resolved certificate.
|
||||
with open(pcDir + siDeltaA2ResolvedJsonFileOut, 'w') as outfile:
|
||||
print("Writing %s%s ..." % (pcDir, siDeltaA2ResolvedJsonFileOut))
|
||||
json.dump(siDeltaA2ResolvedComponentDict, outfile)
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(siDeltaA2ResolvedComponentDict)
|
||||
|
||||
# Write the new JSON file to be used in creating the SIDeltaA3 certificate.
|
||||
with open(pcDir + siDeltaA3JsonFileOut, 'w') as outfile:
|
||||
print("Writing %s%s ..." % (pcDir, siDeltaA3JsonFileOut))
|
||||
json.dump(siDeltaA3ComponentDict, outfile)
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(siDeltaA3ComponentDict)
|
||||
|
||||
# Write the new JSON file to be used in creating the VARDeltaA1 certificate.
|
||||
with open(pcDir + varDeltaA1JsonFileOut, 'w') as outfile:
|
||||
print("Writing %s%s ..." % (pcDir, varDeltaA1JsonFileOut))
|
||||
json.dump(varDeltaA1ComponentDict, outfile)
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(varDeltaA1ComponentDict)
|
||||
|
||||
# Write the new JSON file to be used in creating the VARDeltaA2 certificate.
|
||||
with open(pcDir + varDeltaA2JsonFileOut, 'w') as outfile:
|
||||
print("Writing %s%s ..." % (pcDir, varDeltaA2JsonFileOut))
|
||||
json.dump(varDeltaA2ComponentDict, outfile)
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(varDeltaA2ComponentDict)
|
||||
|
||||
# Write the new JSON file to be used in creating the VARDeltaA2Resolved certificate.
|
||||
with open(pcDir + varDeltaA2ResolvedJsonFileOut, 'w') as outfile:
|
||||
print("Writing %s%s ..." % (pcDir, varDeltaA2ResolvedJsonFileOut))
|
||||
json.dump(varDeltaA2ResolvedComponentDict, outfile)
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(varDeltaA2ResolvedComponentDict)
|
||||
|
||||
except Exception as ex:
|
||||
print("=== ERROR generating PBaseCertA JSON files: %s" % (ex.message))
|
@ -1,94 +0,0 @@
|
||||
# Create JSON files needed to create the following certificates:
|
||||
# SIDeltaCertB1 - Bad Delta
|
||||
# VARDeltaCertB1 - Good Delta
|
||||
|
||||
from __future__ import print_function
|
||||
import copy
|
||||
import json
|
||||
import pprint
|
||||
import sys
|
||||
|
||||
try:
|
||||
pcDir = '/var/hirs/pc_generation/'
|
||||
pBaseJsonFileIn = 'PBaseCertB.componentlist.json'
|
||||
siDeltaB1JsonFileOut = 'SIDeltaCertB1.componentlist.json'
|
||||
varDeltaB1JsonFileOut = 'VARDeltaCertB1.componentlist.json'
|
||||
|
||||
# Open the PBaseCertB components file
|
||||
with open(pcDir + pBaseJsonFileIn, "r") as f:
|
||||
|
||||
# Load the info from the componentsFile
|
||||
data = json.load(f)
|
||||
print("The %s info:" % (pBaseJsonFileIn))
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(data)
|
||||
|
||||
# Initialize the structures
|
||||
siDeltaB1ComponentDict = copy.deepcopy(data)
|
||||
varDeltaB1ComponentDict = copy.deepcopy(data)
|
||||
|
||||
# Remove all the components
|
||||
del siDeltaB1ComponentDict['COMPONENTS'][:]
|
||||
del varDeltaB1ComponentDict['COMPONENTS'][:]
|
||||
|
||||
# Find "FAULTY" component from original data; and create the delta JSON files
|
||||
for component in data['COMPONENTS']:
|
||||
if component['MODEL'].__contains__("-FAULTY"):
|
||||
|
||||
print("Found Faulty Component:")
|
||||
pp.pprint(component)
|
||||
|
||||
# Make copy of component for SIDeltaCertB1
|
||||
siDeltaB1Component = copy.copy(component)
|
||||
|
||||
# Change status to be "MODIFIED"
|
||||
print("Updated status to be MODIFIED...")
|
||||
siDeltaB1Component['STATUS'] = "MODIFIED"
|
||||
|
||||
# Add to component SIDeltaCertB1 list
|
||||
print("Adding component to %s list..." % (siDeltaB1JsonFileOut))
|
||||
siDeltaB1ComponentDict['COMPONENTS'].append(siDeltaB1Component)
|
||||
|
||||
# Make copy of component for VARDeltaCertB1
|
||||
varDeltaB1Component_1 = copy.copy(component)
|
||||
|
||||
# Change status to be "REMOVED"
|
||||
print("Updated status to be REMOVED...")
|
||||
varDeltaB1Component_1['STATUS'] = "REMOVED"
|
||||
|
||||
# Add to component VARDeltaCertB1 list
|
||||
print("Adding component to %s list..." % (varDeltaB1JsonFileOut))
|
||||
varDeltaB1ComponentDict['COMPONENTS'].append(varDeltaB1Component_1)
|
||||
|
||||
# Make copy of component for VARDeltaCertB1
|
||||
varDeltaB1Component_2 = copy.copy(component)
|
||||
|
||||
# Change status to be "ADDED"
|
||||
print("Updated status to be ADDED...")
|
||||
varDeltaB1Component_2['STATUS'] = "ADDED"
|
||||
|
||||
# Remove "-FAULTY" substring in the model
|
||||
varDeltaB1Component_2['MODEL'] = varDeltaB1Component_2['MODEL'].replace('-FAULTY', '')
|
||||
print("Removed -FAULTY from component...")
|
||||
|
||||
# Add to component VARDeltaCertB1 list
|
||||
print("Adding component to %s list..." % (varDeltaB1JsonFileOut))
|
||||
varDeltaB1ComponentDict['COMPONENTS'].append(varDeltaB1Component_2)
|
||||
break
|
||||
|
||||
# Write the new JSON file to be used in creating the SIDeltaCertB1 certificate
|
||||
with open(pcDir + siDeltaB1JsonFileOut, 'w') as outfile:
|
||||
print("Writing %s%s ..." % (pcDir, siDeltaB1JsonFileOut))
|
||||
json.dump(siDeltaB1ComponentDict, outfile)
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(siDeltaB1ComponentDict)
|
||||
|
||||
# Write the new JSON file to be used in creating the VARDeltaCertB1 certificate
|
||||
with open(pcDir + varDeltaB1JsonFileOut, 'w') as outfile:
|
||||
print("Writing %s%s ..." % (pcDir, varDeltaB1JsonFileOut))
|
||||
json.dump(varDeltaB1ComponentDict, outfile)
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
pp.pprint(varDeltaB1ComponentDict)
|
||||
|
||||
except Exception as ex:
|
||||
print("=== ERROR generating PBaseCertB JSON files: %s" % (ex.message))
|
@ -1,198 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Script to setup the TPM 2.0 Provisioner Docker Image for System Tests Base/Delta(Bad)
|
||||
set -e
|
||||
|
||||
# Wait for ACA to boot
|
||||
echo "Waiting for ACA to spin up..."
|
||||
until [ "`curl --silent --connect-timeout 1 -I -k https://${HIRS_ACA_PORTAL_IP}:${HIRS_ACA_PORTAL_PORT}/HIRS_AttestationCAPortal | grep '302 Found'`" != "" ]; do
|
||||
:
|
||||
done
|
||||
echo "ACA is up!"
|
||||
|
||||
# Function to install TPM 2.0 Provisioner packages
|
||||
function InstallProvisioner {
|
||||
echo "===========Installing TPM 2.0 Provisioner Packages...==========="
|
||||
|
||||
pushd /HIRS
|
||||
if [ ! -d package/rpm/RPMS ]; then
|
||||
./package/package.centos.sh
|
||||
fi
|
||||
yum install -y package/rpm/RPMS/x86_64/HIRS_Provisioner_TPM_2_0*.el7.x86_64.rpm
|
||||
popd
|
||||
}
|
||||
|
||||
# Function to initialize the TPM 2.0 Emulator with a bad base certificate
|
||||
function InitTpm2Emulator {
|
||||
echo "===========Initializing TPM 2.0 Emulator with bad base certificate...==========="
|
||||
|
||||
mkdir -p /var/run/dbus
|
||||
if [ -e /var/run/dbus/pid ]; then
|
||||
rm /var/run/dbus/pid
|
||||
fi
|
||||
|
||||
if [ -e /var/run/dbus/system_bus_socket ]; then
|
||||
rm /var/run/dbus/system_bus_socket
|
||||
fi
|
||||
|
||||
# Start the DBus
|
||||
dbus-daemon --fork --system
|
||||
echo "DBus started"
|
||||
|
||||
# Give DBus time to start up
|
||||
sleep 5
|
||||
|
||||
/ibmtpm/src/./tpm_server &
|
||||
echo "TPM Emulator started"
|
||||
|
||||
# Give tpm_server time to start and register on the DBus
|
||||
sleep 5
|
||||
|
||||
tpm2-abrmd -t socket &
|
||||
echo "TPM2-Abrmd started"
|
||||
|
||||
# Give ABRMD time to start and register on the DBus
|
||||
sleep 5
|
||||
|
||||
# Certificates
|
||||
ek_cert="/HIRS/.ci/setup/certs/ek_cert.der"
|
||||
ca_key="/HIRS/.ci/setup/certs/ca.key"
|
||||
ca_cert="/HIRS/.ci/setup/certs/ca.crt"
|
||||
platform_cert="PBaseCertB.der"
|
||||
si_delta_cert_B1="SIDeltaCertB1.der"
|
||||
var_delta_cert_B1="VARDeltaCertB1.der"
|
||||
|
||||
# PACCOR directory
|
||||
PC_DIR=/var/hirs/pc_generation
|
||||
mkdir -p $PC_DIR
|
||||
|
||||
echo "Running PACCOR to generate local component information..."
|
||||
# Use specific PACCOR script for system testing.
|
||||
# Will provide default component SN#s when needed.
|
||||
cp -f /HIRS/.ci/system-tests/allcomponents_hirs_system_tests.sh /opt/paccor/scripts/allcomponents.sh
|
||||
/opt/paccor/scripts/allcomponents.sh > $PC_DIR/componentsFile
|
||||
echo
|
||||
|
||||
# Add faulty component JSON files needed to generate the certificates
|
||||
python /HIRS/.ci/setup/addFaultyComponentsForPBaseCertB.py
|
||||
echo
|
||||
|
||||
# Generate certificates in the order they'll be used in the system tests.
|
||||
# And stager the begin dates properly (the -b option for the /opt/paccor/bin/signer)
|
||||
|
||||
# Generate the bad base certificate
|
||||
echo "Generating certificates..."
|
||||
echo "Generating $platform_cert..."
|
||||
/opt/paccor/scripts/referenceoptions.sh > $PC_DIR/optionsFile
|
||||
/opt/paccor/scripts/otherextensions.sh > $PC_DIR/extensionsFile
|
||||
/opt/paccor/bin/observer -c $PC_DIR/PBaseCertB.componentlist.json -p $PC_DIR/optionsFile -e $ek_cert -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/signer -c $PC_DIR/PBaseCertB.componentlist.json -o $PC_DIR/observerFile -x $PC_DIR/extensionsFile -b 20180101 -a 20280101 -N $RANDOM -k $ca_key -P $ca_cert -f $PC_DIR/$platform_cert
|
||||
echo "Done"
|
||||
|
||||
# Create good delta component and create SIDeltaCertB1.componentlist.json
|
||||
python /HIRS/.ci/setup/createDeltaComponentsForPBaseCertB.py
|
||||
echo
|
||||
|
||||
# Generate the SIDeltaCertB1certificate
|
||||
echo "Generating $si_delta_cert_B1..."
|
||||
rm -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/observer -c $PC_DIR/SIDeltaCertB1.componentlist.json -p $PC_DIR/optionsFile -e $PC_DIR/$platform_cert -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/signer -c $PC_DIR/SIDeltaCertB1.componentlist.json -o $PC_DIR/observerFile -x $PC_DIR/extensionsFile -b 20180201 -a 20280101 -N $RANDOM -k $ca_key -P $ca_cert -e $PC_DIR/$platform_cert -f $PC_DIR/$si_delta_cert_B1
|
||||
echo "Done"
|
||||
|
||||
# Generate the VARDeltaCertB1 certificate
|
||||
echo "Generating $var_delta_cert_B1..."
|
||||
rm -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/observer -c $PC_DIR/VARDeltaCertB1.componentlist.json -p $PC_DIR/optionsFile -e $PC_DIR/$platform_cert -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/signer -c $PC_DIR/VARDeltaCertB1.componentlist.json -o $PC_DIR/observerFile -x $PC_DIR/extensionsFile -b 20180301 -a 20280101 -N $RANDOM -k $ca_key -P $ca_cert -e $PC_DIR/$platform_cert -f $PC_DIR/$var_delta_cert_B1
|
||||
echo "Done"
|
||||
|
||||
# Release EK nvram
|
||||
if tpm2_nvlist | grep -q 0x1c00002; then
|
||||
echo "Released NVRAM for EK."
|
||||
tpm2_nvrelease -x 0x1c00002 -a 0x40000001
|
||||
fi
|
||||
|
||||
# Define nvram space to enable loading of EK cert (-x NV Index, -a handle to
|
||||
# authorize [0x40000001 = ownerAuth handle], -s size [defaults to 2048], -t
|
||||
# specifies attribute value in publicInfo struct
|
||||
# [0x2000A = ownerread|ownerwrite|policywrite])
|
||||
size=$(cat $ek_cert | wc -c)
|
||||
echo "Define NVRAM location for EK cert of size $size."
|
||||
tpm2_nvdefine -x 0x1c00002 -a 0x40000001 -t 0x2000A -s $size
|
||||
|
||||
# Load key into TPM nvram
|
||||
echo "Loading EK cert $ek_cert into NVRAM."
|
||||
tpm2_nvwrite -x 0x1c00002 -a 0x40000001 $ek_cert
|
||||
|
||||
# Release PC nvram
|
||||
if tpm2_nvlist | grep -q 0x1c90000; then
|
||||
echo "Released NVRAM for PC."
|
||||
tpm2_nvrelease -x 0x1c90000 -a 0x40000001
|
||||
fi
|
||||
|
||||
# Store the platform certificate in the TPM's NVRAM
|
||||
size=$(cat $PC_DIR/$platform_cert | wc -c)
|
||||
echo "Define NVRAM location for PC cert of size $size."
|
||||
tpm2_nvdefine -x 0x1c90000 -a 0x40000001 -t 0x2000A -s $size
|
||||
|
||||
echo "Loading PC cert $PC_DIR/$platform_cert into NVRAM."
|
||||
tpm2_nvwrite -x 0x1c90000 -a 0x40000001 $PC_DIR/$platform_cert
|
||||
|
||||
echo "===========TPM 2.0 Emulator Initialization Complete!==========="
|
||||
|
||||
# Set Logging to INFO Level
|
||||
sed -i "s/WARN/INFO/" /etc/hirs/TPM2_Provisioner/log4cplus_config.ini
|
||||
}
|
||||
|
||||
# Function to update the hirs-site.config file
|
||||
function UpdateHirsSiteConfigFile {
|
||||
HIRS_SITE_CONFIG="/etc/hirs/hirs-site.config"
|
||||
|
||||
echo ""
|
||||
echo "===========Updating ${HIRS_SITE_CONFIG}, using values from /HIRS/.ci/docker/.env file...==========="
|
||||
cat /HIRS/.ci/docker/.env
|
||||
|
||||
cat <<DEFAULT_SITE_CONFIG_FILE > $HIRS_SITE_CONFIG
|
||||
#*******************************************
|
||||
#* HIRS site configuration properties file
|
||||
#*******************************************
|
||||
CLIENT_HOSTNAME=${HIRS_ACA_PROVISIONER_TPM2_IP}
|
||||
TPM_ENABLED=${TPM_ENABLED}
|
||||
IMA_ENABLED=${IMA_ENABLED}
|
||||
|
||||
# Site-specific configuration
|
||||
ATTESTATION_CA_FQDN=${HIRS_ACA_HOSTNAME}
|
||||
ATTESTATION_CA_PORT=${HIRS_ACA_PORTAL_PORT}
|
||||
BROKER_FQDN=${HIRS_ACA_PORTAL_IP}
|
||||
BROKER_PORT=${HIRS_BROKER_PORT}
|
||||
PORTAL_FQDN=${HIRS_ACA_PORTAL_IP}
|
||||
PORTAL_PORT=${HIRS_ACA_PORTAL_PORT}
|
||||
|
||||
DEFAULT_SITE_CONFIG_FILE
|
||||
|
||||
echo "===========New HIRS Config File==========="
|
||||
cat /etc/hirs/hirs-site.config
|
||||
}
|
||||
|
||||
# Install packages
|
||||
InstallProvisioner
|
||||
|
||||
# Install TPM 2.0 Emulator
|
||||
InitTpm2Emulator
|
||||
|
||||
# Update the hirs-site.config file
|
||||
UpdateHirsSiteConfigFile
|
||||
|
||||
# Set alias to use python3
|
||||
echo "===========Python Version==========="
|
||||
python3 --version
|
||||
alias python='/usr/bin/python3.6'
|
||||
alias
|
||||
|
||||
echo ""
|
||||
echo "TPM 2.0 Emulator NV RAM list"
|
||||
tpm2_nvlist
|
||||
|
||||
echo ""
|
||||
echo "===========HIRS ACA TPM 2.0 Provisioner Setup Complete!==========="
|
@ -1,237 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Script to setup the TPM 2.0 Provisioner Docker Image for System Tests Base/Delta(Good)
|
||||
set -e
|
||||
|
||||
# Wait for ACA to boot
|
||||
echo "Waiting for ACA to spin up..."
|
||||
until [ "`curl --silent --connect-timeout 1 -I -k https://${HIRS_ACA_PORTAL_IP}:${HIRS_ACA_PORTAL_PORT}/HIRS_AttestationCAPortal | grep '302 Found'`" != "" ]; do
|
||||
:
|
||||
done
|
||||
echo "ACA is up!"
|
||||
|
||||
# Function to install TPM 2.0 Provisioner packages
|
||||
function InstallProvisioner {
|
||||
echo "===========Installing TPM 2.0 Provisioner Packages...==========="
|
||||
|
||||
pushd /HIRS
|
||||
if [ ! -d package/rpm/RPMS ]; then
|
||||
./package/package.centos.sh
|
||||
fi
|
||||
yum install -y package/rpm/RPMS/x86_64/HIRS_Provisioner_TPM_2_0*.el7.x86_64.rpm
|
||||
popd
|
||||
}
|
||||
|
||||
# Function to initialize the TPM 2.0 Emulator with a good base certificate
|
||||
function InitTpm2Emulator {
|
||||
echo "===========Initializing TPM 2.0 Emulator with good base certificate...==========="
|
||||
|
||||
mkdir -p /var/run/dbus
|
||||
if [ -e /var/run/dbus/pid ]; then
|
||||
rm /var/run/dbus/pid
|
||||
fi
|
||||
|
||||
if [ -e /var/run/dbus/system_bus_socket ]; then
|
||||
rm /var/run/dbus/system_bus_socket
|
||||
fi
|
||||
|
||||
# Start the DBus
|
||||
dbus-daemon --fork --system
|
||||
echo "DBus started"
|
||||
|
||||
# Give DBus time to start up
|
||||
sleep 5
|
||||
|
||||
/ibmtpm/src/./tpm_server &
|
||||
echo "TPM Emulator started"
|
||||
|
||||
# Give tpm_server time to start and register on the DBus
|
||||
sleep 5
|
||||
|
||||
tpm2-abrmd -t socket &
|
||||
echo "TPM2-Abrmd started"
|
||||
|
||||
# Give ABRMD time to start and register on the DBus
|
||||
sleep 5
|
||||
|
||||
# Certificates
|
||||
ek_cert="/HIRS/.ci/setup/certs/ek_cert.der"
|
||||
ca_key="/HIRS/.ci/setup/certs/ca.key"
|
||||
ca_cert="/HIRS/.ci/setup/certs/ca.crt"
|
||||
pBase_certA="PBaseCertA.der"
|
||||
pBase_certB="PBaseCertB.der"
|
||||
si_delta_cert_A1="SIDeltaCertA1.der"
|
||||
si_delta_cert_A2="SIDeltaCertA2.der"
|
||||
si_delta_cert_A2_resolved="SIDeltaCertA2_resolved.der"
|
||||
si_delta_cert_A3="SIDeltaCertA3.der"
|
||||
var_delta_cert_A1="VARDeltaCertA1.der"
|
||||
var_delta_cert_A2="VARDeltaCertA2.der"
|
||||
var_delta_cert_A2_resolved="VARDeltaCertA2_resolved.der"
|
||||
|
||||
# PACCOR directory
|
||||
PC_DIR=/var/hirs/pc_generation
|
||||
mkdir -p $PC_DIR
|
||||
|
||||
echo "Running PACCOR to generate local component information..."
|
||||
# Use specific PACCOR script for system testing.
|
||||
# Will provide default component SN#s when needed.
|
||||
cp -f /HIRS/.ci/system-tests/allcomponents_hirs_system_tests.sh /opt/paccor/scripts/allcomponents.sh
|
||||
/opt/paccor/scripts/allcomponents.sh > $PC_DIR/componentsFile
|
||||
|
||||
# Split into JSON files needed to generate the certificates
|
||||
python /HIRS/.ci/setup/createDeltaComponentsForPBaseCertA.py
|
||||
echo
|
||||
|
||||
# Generate certificates in the order they'll be used in the system tests.
|
||||
# And stager the begin dates properly (the -b option for the /opt/paccor/bin/signer)
|
||||
echo "Generating certificates..."
|
||||
echo "Generating $pBase_certA..."
|
||||
/opt/paccor/scripts/referenceoptions.sh > $PC_DIR/optionsFile
|
||||
/opt/paccor/scripts/otherextensions.sh > $PC_DIR/extensionsFile
|
||||
/opt/paccor/bin/observer -c $PC_DIR/PBaseCertA.componentlist.json -p $PC_DIR/optionsFile -e $ek_cert -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/signer -c $PC_DIR/PBaseCertA.componentlist.json -o $PC_DIR/observerFile -x $PC_DIR/extensionsFile -b 20180101 -a 20280101 -N $RANDOM -k $ca_key -P $ca_cert -f $PC_DIR/$pBase_certA
|
||||
echo "Done"
|
||||
|
||||
# Generate the PBaseCertB certificate. Just need to copy from PBaseCertA.
|
||||
echo "Generating $pBase_certB..."
|
||||
cp $PC_DIR/$pBase_certA $PC_DIR/$pBase_certB
|
||||
echo "Done"
|
||||
|
||||
# Generate the SIDeltaCertA1 certificate
|
||||
echo "Generating $si_delta_cert_A1, using $pBase_certA..."
|
||||
rm -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/observer -c $PC_DIR/SIDeltaCertA1.componentlist.json -p $PC_DIR/optionsFile -e $PC_DIR/$pBase_certA -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/signer -c $PC_DIR/SIDeltaCertA1.componentlist.json -o $PC_DIR/observerFile -x $PC_DIR/extensionsFile -b 20180201 -a 20280101 -N $RANDOM -k $ca_key -P $ca_cert -e $PC_DIR/$pBase_certA -f $PC_DIR/$si_delta_cert_A1
|
||||
echo "Done"
|
||||
|
||||
# Generate the VARDeltaCertA1 certificate
|
||||
echo "Generating $var_delta_cert_A1, using $pBase_certA..."
|
||||
rm -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/observer -c $PC_DIR/VARDeltaCertA1.componentlist.json -p $PC_DIR/optionsFile -e $PC_DIR/$pBase_certA -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/signer -c $PC_DIR/VARDeltaCertA1.componentlist.json -o $PC_DIR/observerFile -x $PC_DIR/extensionsFile -b 20180301 -a 20280101 -N $RANDOM -k $ca_key -P $ca_cert -e $PC_DIR/$pBase_certA -f $PC_DIR/$var_delta_cert_A1
|
||||
echo "Done"
|
||||
|
||||
# Generate the SIDeltaCertA2 certificate
|
||||
echo "Generating $si_delta_cert_A2, using $pBase_certA..."
|
||||
rm -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/observer -c $PC_DIR/SIDeltaCertA2.componentlist.json -p $PC_DIR/optionsFile -e $PC_DIR/$pBase_certA -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/signer -c $PC_DIR/SIDeltaCertA2.componentlist.json -o $PC_DIR/observerFile -x $PC_DIR/extensionsFile -b 20180401 -a 20280101 -N $RANDOM -k $ca_key -P $ca_cert -e $PC_DIR/$pBase_certA -f $PC_DIR/$si_delta_cert_A2
|
||||
echo "Done"
|
||||
|
||||
# Generate the SIDeltaCertA2_resolved certificate
|
||||
echo "Generating $si_delta_cert_A2_resolved, using $pBase_certA..."
|
||||
rm -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/observer -c $PC_DIR/SIDeltaCertA2.resolved.componentlist.json -p $PC_DIR/optionsFile -e $PC_DIR/$pBase_certA -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/signer -c $PC_DIR/SIDeltaCertA2.resolved.componentlist.json -o $PC_DIR/observerFile -x $PC_DIR/extensionsFile -b 20180501 -a 20280101 -N $RANDOM -k $ca_key -P $ca_cert -e $PC_DIR/$pBase_certA -f $PC_DIR/$si_delta_cert_A2_resolved
|
||||
echo "Done"
|
||||
|
||||
# Generate the VARDeltaCertA2 certificate
|
||||
echo "Generating $var_delta_cert_A2, using $pBase_certA..."
|
||||
rm -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/observer -c $PC_DIR/VARDeltaCertA2.componentlist.json -p $PC_DIR/optionsFile -e $PC_DIR/$pBase_certA -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/signer -c $PC_DIR/VARDeltaCertA2.componentlist.json -o $PC_DIR/observerFile -x $PC_DIR/extensionsFile -b 20180601 -a 20280101 -N $RANDOM -k $ca_key -P $ca_cert -e $PC_DIR/$pBase_certA -f $PC_DIR/$var_delta_cert_A2
|
||||
echo "Done"
|
||||
|
||||
# Generate the VARDeltaCertA2_resolved certificate
|
||||
echo "Generating $var_delta_cert_A2_resolved, using $pBase_certA..."
|
||||
rm -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/observer -c $PC_DIR/VARDeltaCertA2.resolved.componentlist.json -p $PC_DIR/optionsFile -e $PC_DIR/$pBase_certA -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/signer -c $PC_DIR/VARDeltaCertA2.resolved.componentlist.json -o $PC_DIR/observerFile -x $PC_DIR/extensionsFile -b 20180701 -a 20280101 -N $RANDOM -k $ca_key -P $ca_cert -e $PC_DIR/$pBase_certA -f $PC_DIR/$var_delta_cert_A2_resolved
|
||||
echo "Done"
|
||||
|
||||
# Generate the SIDeltaCertA3 certificate
|
||||
echo "Generating $si_delta_cert_A3, using $si_delta_cert_A1 as Base..."
|
||||
rm -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/observer -c $PC_DIR/SIDeltaCertA3.componentlist.json -p $PC_DIR/optionsFile -e $PC_DIR/$si_delta_cert_A1 -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/signer -c $PC_DIR/SIDeltaCertA3.componentlist.json -o $PC_DIR/observerFile -x $PC_DIR/extensionsFile -b 20180801 -a 20280101 -N $RANDOM -k $ca_key -P $ca_cert -e $PC_DIR/$si_delta_cert_A1 -f $PC_DIR/$si_delta_cert_A3
|
||||
echo "Done"
|
||||
|
||||
# Release EK nvram
|
||||
if tpm2_nvlist | grep -q 0x1c00002; then
|
||||
echo "Released NVRAM for EK."
|
||||
tpm2_nvrelease -x 0x1c00002 -a 0x40000001
|
||||
fi
|
||||
|
||||
# Define nvram space to enable loading of EK cert (-x NV Index, -a handle to
|
||||
# authorize [0x40000001 = ownerAuth handle], -s size [defaults to 2048], -t
|
||||
# specifies attribute value in publicInfo struct
|
||||
# [0x2000A = ownerread|ownerwrite|policywrite])
|
||||
size=$(cat $ek_cert | wc -c)
|
||||
echo "Define NVRAM location for EK cert of size $size."
|
||||
tpm2_nvdefine -x 0x1c00002 -a 0x40000001 -t 0x2000A -s $size
|
||||
|
||||
# Load key into TPM nvram
|
||||
echo "Loading EK cert $ek_cert into NVRAM."
|
||||
tpm2_nvwrite -x 0x1c00002 -a 0x40000001 $ek_cert
|
||||
|
||||
# Release PC nvram
|
||||
if tpm2_nvlist | grep -q 0x1c90000; then
|
||||
echo "Released NVRAM for PC."
|
||||
tpm2_nvrelease -x 0x1c90000 -a 0x40000001
|
||||
fi
|
||||
|
||||
# Store the platform certificate in the TPM's NVRAM
|
||||
size=$(cat $PC_DIR/$pBase_certA | wc -c)
|
||||
echo "Define NVRAM location for PC cert of size $size."
|
||||
tpm2_nvdefine -x 0x1c90000 -a 0x40000001 -t 0x2000A -s $size
|
||||
|
||||
echo "Loading PC cert $PC_DIR/$pBase_certA into NVRAM."
|
||||
tpm2_nvwrite -x 0x1c90000 -a 0x40000001 $PC_DIR/$pBase_certA
|
||||
|
||||
echo "===========TPM 2.0 Emulator Initialization Complete!==========="
|
||||
|
||||
# Set Logging to INFO Level
|
||||
sed -i "s/WARN/INFO/" /etc/hirs/TPM2_Provisioner/log4cplus_config.ini
|
||||
}
|
||||
|
||||
# Function to update the hirs-site.config file
|
||||
function UpdateHirsSiteConfigFile {
|
||||
HIRS_SITE_CONFIG="/etc/hirs/hirs-site.config"
|
||||
|
||||
echo ""
|
||||
echo "===========Updating ${HIRS_SITE_CONFIG}, using values from /HIRS/.ci/docker/.env file...==========="
|
||||
cat /HIRS/.ci/docker/.env
|
||||
|
||||
cat <<DEFAULT_SITE_CONFIG_FILE > $HIRS_SITE_CONFIG
|
||||
#*******************************************
|
||||
#* HIRS site configuration properties file
|
||||
#*******************************************
|
||||
CLIENT_HOSTNAME=${HIRS_ACA_PROVISIONER_TPM2_IP}
|
||||
TPM_ENABLED=${TPM_ENABLED}
|
||||
IMA_ENABLED=${IMA_ENABLED}
|
||||
|
||||
# Site-specific configuration
|
||||
ATTESTATION_CA_FQDN=${HIRS_ACA_HOSTNAME}
|
||||
ATTESTATION_CA_PORT=${HIRS_ACA_PORTAL_PORT}
|
||||
BROKER_FQDN=${HIRS_ACA_PORTAL_IP}
|
||||
BROKER_PORT=${HIRS_BROKER_PORT}
|
||||
PORTAL_FQDN=${HIRS_ACA_PORTAL_IP}
|
||||
PORTAL_PORT=${HIRS_ACA_PORTAL_PORT}
|
||||
|
||||
DEFAULT_SITE_CONFIG_FILE
|
||||
|
||||
echo "===========New HIRS Config File==========="
|
||||
cat /etc/hirs/hirs-site.config
|
||||
}
|
||||
|
||||
# Install packages
|
||||
InstallProvisioner
|
||||
|
||||
# Install TPM 2.0 Emulator
|
||||
InitTpm2Emulator
|
||||
|
||||
# Update the hirs-site.config file
|
||||
UpdateHirsSiteConfigFile
|
||||
|
||||
# Set alias to use python3
|
||||
echo "===========Python Version==========="
|
||||
python3 --version
|
||||
alias python='/usr/bin/python3.6'
|
||||
alias
|
||||
|
||||
echo ""
|
||||
echo "TPM 2.0 Emulator NV RAM list"
|
||||
tpm2_nvlist
|
||||
|
||||
echo ""
|
||||
echo "===========HIRS ACA TPM 2.0 Provisioner Setup Complete!==========="
|
@ -1,23 +1,20 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Script to setup the TPM 2.0 Provisioner Docker Image for System Tests
|
||||
#########################################################################################
|
||||
# Script to setup the TPM 2.0 Provisioner Docker Image for System Tests
|
||||
#
|
||||
#########################################################################################
|
||||
set -e
|
||||
pushd /
|
||||
echo "Setting up TPM emulator for the TPM2 Provisioner"
|
||||
|
||||
# Wait for ACA to boot
|
||||
echo "Waiting for ACA to spin up..."
|
||||
until [ "`curl --silent --connect-timeout 1 -I -k https://${HIRS_ACA_PORTAL_IP}:${HIRS_ACA_PORTAL_PORT}/HIRS_AttestationCAPortal | grep '302 Found'`" != "" ]; do
|
||||
:
|
||||
done
|
||||
echo "ACA is up!"
|
||||
|
||||
# Function to install TPM 2.0 Provisioner packages
|
||||
# Function to make and install TPM 2.0 Provisioner packages
|
||||
function InstallProvisioner {
|
||||
echo "===========Installing TPM 2.0 Provisioner Packages...==========="
|
||||
|
||||
pushd /HIRS
|
||||
if [ ! -d package/rpm/RPMS ]; then
|
||||
./package/package.centos.sh
|
||||
fi
|
||||
echo "Building the HIRS Provisioner ..."
|
||||
mkdir -p /HIRS/logs/provisioner/
|
||||
sh package/package.centos.sh &> /HIRS/logs/provisioner/provisioner_build.log
|
||||
echo "Installing the HIRS Provisioner ..."
|
||||
yum install -y package/rpm/RPMS/x86_64/HIRS_Provisioner_TPM_2_0*.el7.x86_64.rpm
|
||||
popd
|
||||
}
|
||||
@ -46,13 +43,13 @@ function InitTpm2Emulator {
|
||||
echo "TPM Emulator started"
|
||||
|
||||
# Give tpm_server time to start and register on the DBus
|
||||
sleep 5
|
||||
sleep 2
|
||||
|
||||
tpm2-abrmd -t socket &
|
||||
echo "TPM2-Abrmd started"
|
||||
|
||||
# Give ABRMD time to start and register on the DBus
|
||||
sleep 5
|
||||
sleep 2
|
||||
|
||||
# Certificates
|
||||
ek_cert="/HIRS/.ci/setup/certs/ek_cert.der"
|
||||
@ -60,22 +57,6 @@ function InitTpm2Emulator {
|
||||
ca_cert="/HIRS/.ci/setup/certs/ca.crt"
|
||||
platform_cert="platformAttributeCertificate.der"
|
||||
|
||||
# PACCOR directory
|
||||
PC_DIR=/var/hirs/pc_generation
|
||||
mkdir -p $PC_DIR
|
||||
|
||||
echo "Running PACCOR to generate local component information..."
|
||||
# Use specific PACCOR script for system testing.
|
||||
# Will provide default component SN#s when needed.
|
||||
cp -f /HIRS/.ci/system-tests/allcomponents_hirs_system_tests.sh /opt/paccor/scripts/allcomponents.sh
|
||||
/opt/paccor/scripts/allcomponents.sh > $PC_DIR/componentsFile
|
||||
/opt/paccor/scripts/referenceoptions.sh > $PC_DIR/optionsFile
|
||||
/opt/paccor/scripts/otherextensions.sh > $PC_DIR/extensionsFile
|
||||
|
||||
echo "Generating $platform_cert..."
|
||||
/opt/paccor/bin/observer -c $PC_DIR/componentsFile -p $PC_DIR/optionsFile -e $ek_cert -f $PC_DIR/observerFile
|
||||
/opt/paccor/bin/signer -o $PC_DIR/observerFile -x $PC_DIR/extensionsFile -b 20180101 -a 20280201 -N $RANDOM -k $ca_key -P $ca_cert -f $PC_DIR/$platform_cert
|
||||
|
||||
if tpm2_nvlist | grep -q 0x1c00002; then
|
||||
echo "Released NVRAM for EK."
|
||||
tpm2_nvrelease -x 0x1c00002 -a 0x40000001
|
||||
@ -98,14 +79,6 @@ function InitTpm2Emulator {
|
||||
tpm2_nvrelease -x 0x1c90000 -a 0x40000001
|
||||
fi
|
||||
|
||||
# Store the platform certificate in the TPM's NVRAM
|
||||
size=$(cat $PC_DIR/$platform_cert | wc -c)
|
||||
echo "Define NVRAM location for PC cert of size $size."
|
||||
tpm2_nvdefine -x 0x1c90000 -a 0x40000001 -t 0x2000A -s $size
|
||||
|
||||
echo "Loading PC cert $PC_DIR/$platform_cert into NVRAM."
|
||||
tpm2_nvwrite -x 0x1c90000 -a 0x40000001 $PC_DIR/$platform_cert
|
||||
|
||||
echo "===========TPM 2.0 Emulator Initialization Complete!==========="
|
||||
|
||||
# Set Logging to INFO Level
|
||||
@ -142,6 +115,19 @@ DEFAULT_SITE_CONFIG_FILE
|
||||
cat /etc/hirs/hirs-site.config
|
||||
}
|
||||
|
||||
function WaitForAca {
|
||||
# Wait for ACA to boot
|
||||
echo "Waiting for ACA to spin up at address ${HIRS_ACA_PORTAL_IP} on port ${HIRS_ACA_PORTAL_PORT} ..."
|
||||
until [ "`curl --silent --connect-timeout 1 -I -k https://${HIRS_ACA_PORTAL_IP}:${HIRS_ACA_PORTAL_PORT}/HIRS_AttestationCAPortal | grep '302 Found'`" != "" ]; do
|
||||
sleep 5;
|
||||
#echo "Checking on the ACA..."
|
||||
done
|
||||
echo "ACA is up!"
|
||||
}
|
||||
|
||||
#Wait for the ACA to spin up, if it hasnt already
|
||||
WaitForAca
|
||||
|
||||
# Install packages
|
||||
InstallProvisioner
|
||||
|
||||
@ -151,15 +137,11 @@ InitTpm2Emulator
|
||||
# Update the hirs-site.config file
|
||||
UpdateHirsSiteConfigFile
|
||||
|
||||
# Set alias to use python3
|
||||
echo "===========Python Version==========="
|
||||
python3 --version
|
||||
alias python='/usr/bin/python3.6'
|
||||
alias
|
||||
|
||||
echo ""
|
||||
echo "TPM 2.0 Emulator NV RAM list"
|
||||
tpm2_nvlist
|
||||
|
||||
echo ""
|
||||
echo "===========HIRS ACA TPM 2.0 Provisioner Setup Complete!==========="
|
||||
|
||||
popd
|
38
.ci/system-tests/aca_policy_tests.sh
Normal file
38
.ci/system-tests/aca_policy_tests.sh
Normal file
@ -0,0 +1,38 @@
|
||||
#!/bin/bash
|
||||
#########################################################################################
|
||||
# HIRS ACA Policy System Tests
|
||||
#
|
||||
#########################################################################################
|
||||
testResult=false
|
||||
totalTests=0;
|
||||
failedTests=0;
|
||||
|
||||
# Start ACA Policy Tests
|
||||
# provision_tpm takes 1 parameter (the expected result): "pass" or "fail"
|
||||
|
||||
echo "ACA POLICY TEST 1: Test ACA default policy "
|
||||
provision_tpm2 "pass"
|
||||
|
||||
echo "ACA POLICY TEST 2: Test EK cert Only Validation Policy without a EK Issuer Cert in the trust store"
|
||||
setPolicyEkOnly
|
||||
provision_tpm2 "fail"
|
||||
|
||||
echo "ACA POLICY TEST 3: Test EK Only Validation Policy"
|
||||
uploadTrustedCerts
|
||||
provision_tpm2 "pass"
|
||||
|
||||
echo "ACA POLICY TEST 4: Test PC Validation Policy with no PC"
|
||||
setPolicyEkPc_noAttCheck
|
||||
provision_tpm2 "fail"
|
||||
|
||||
echo "ACA POLICY TEST 5: Test FW and PC Validation Policy with no PC"
|
||||
setPolicyEkPcFw
|
||||
provision_tpm2 "fail"
|
||||
|
||||
# Process Test Results, any single failure will send back a failed result.
|
||||
if [[ $failedTests != 0 ]]; then
|
||||
export TEST_STATUS=1;
|
||||
echo "**** $failedTests out of $totalTests ACA Policy Tests Failed! ****"
|
||||
else
|
||||
echo "**** $totalTests ACA Policy Tests Passed! ****"
|
||||
fi
|
@ -1,886 +0,0 @@
|
||||
#!/bin/bash
|
||||
### NOTE: This file will be moved into the PACCOR project, in the "scripts" directory.
|
||||
### It's here for review, until I get permissions to the PACCOR project
|
||||
|
||||
### User customizable values
|
||||
APP_HOME="`dirname "$0"`"
|
||||
COMPONENTS_URI="" # Specify the optional components URI field
|
||||
COMPONENTS_URI_LOCAL_COPY_FOR_HASH="" # If empty, the optional hashAlgorithm and hashValue fields will not be included for the URI
|
||||
PROPERTIES_URI="" # Specify the optional properties URI field
|
||||
PROPERTIES_URI_LOCAL_COPY_FOR_HASH="" # If empty, the optional hashAlgorithm and hashValue fields will not be included for the URI
|
||||
ENTERPRISE_NUMBERS_FILE="$APP_HOME""/enterprise-numbers"
|
||||
PEN_ROOT="1.3.6.1.4.1." # OID root for the private enterprise numbers
|
||||
SMBIOS_SCRIPT="$APP_HOME""/smbios.sh"
|
||||
HW_SCRIPT="$APP_HOME""/hw.sh" # For components not covered by SMBIOS
|
||||
NVME_SCRIPT="$APP_HOME""/nvme.sh" # For nvme components, until lshw supports them
|
||||
|
||||
### SMBIOS Type Constants
|
||||
source $SMBIOS_SCRIPT
|
||||
SMBIOS_TYPE_PLATFORM="1"
|
||||
SMBIOS_TYPE_CHASSIS="3"
|
||||
SMBIOS_TYPE_BIOS="0"
|
||||
SMBIOS_TYPE_BASEBOARD="2"
|
||||
SMBIOS_TYPE_CPU="4"
|
||||
SMBIOS_TYPE_RAM="17"
|
||||
|
||||
### hw
|
||||
source $HW_SCRIPT
|
||||
source $NVME_SCRIPT
|
||||
|
||||
### ComponentClass values
|
||||
COMPCLASS_REGISTRY_TCG="2.23.133.18.3.1" # switch off values within SMBIOS to reveal accurate component classes
|
||||
COMPCLASS_BASEBOARD="00030003" # these values are meant to be an example. check the component class registry.
|
||||
COMPCLASS_BIOS="00130003"
|
||||
COMPCLASS_UEFI="00130002"
|
||||
COMPCLASS_CHASSIS="00020001" # TODO: chassis type is included in SMBIOS
|
||||
COMPCLASS_CPU="00010002"
|
||||
COMPCLASS_HDD="00070002"
|
||||
COMPCLASS_NIC="00090002"
|
||||
COMPCLASS_RAM="00060001" # TODO: memory type is included in SMBIOS
|
||||
COMPCLASS_GFX="00050002"
|
||||
|
||||
### JSON Structure Keywords
|
||||
JSON_COMPONENTS="COMPONENTS"
|
||||
JSON_COMPONENTSURI="COMPONENTSURI"
|
||||
JSON_PROPERTIES="PROPERTIES"
|
||||
JSON_PROPERTIESURI="PROPERTIESURI"
|
||||
JSON_PLATFORM="PLATFORM"
|
||||
#### JSON Component Keywords
|
||||
JSON_COMPONENTCLASS="COMPONENTCLASS"
|
||||
JSON_COMPONENTCLASSREGISTRY="COMPONENTCLASSREGISTRY"
|
||||
JSON_COMPONENTCLASSVALUE="COMPONENTCLASSVALUE"
|
||||
JSON_MANUFACTURER="MANUFACTURER"
|
||||
JSON_MODEL="MODEL"
|
||||
JSON_SERIAL="SERIAL"
|
||||
JSON_REVISION="REVISION"
|
||||
JSON_MANUFACTURERID="MANUFACTURERID"
|
||||
JSON_FIELDREPLACEABLE="FIELDREPLACEABLE"
|
||||
JSON_ADDRESSES="ADDRESSES"
|
||||
JSON_ETHERNETMAC="ETHERNETMAC"
|
||||
JSON_WLANMAC="WLANMAC"
|
||||
JSON_BLUETOOTHMAC="BLUETOOTHMAC"
|
||||
JSON_COMPONENTPLATFORMCERT="PLATFORMCERT"
|
||||
JSON_ATTRIBUTECERTIDENTIFIER="ATTRIBUTECERTIDENTIFIER"
|
||||
JSON_GENERICCERTIDENTIFIER="GENERICCERTIDENTIFIER"
|
||||
JSON_ISSUER="ISSUER"
|
||||
JSON_COMPONENTPLATFORMCERTURI="PLATFORMCERTURI"
|
||||
JSON_STATUS="STATUS"
|
||||
#### JSON Platform Keywords (Subject Alternative Name)
|
||||
JSON_PLATFORMMODEL="PLATFORMMODEL"
|
||||
JSON_PLATFORMMANUFACTURERSTR="PLATFORMMANUFACTURERSTR"
|
||||
JSON_PLATFORMVERSION="PLATFORMVERSION"
|
||||
JSON_PLATFORMSERIAL="PLATFORMSERIAL"
|
||||
JSON_PLATFORMMANUFACTURERID="PLATFORMMANUFACTURERID"
|
||||
#### JSON Platform URI Keywords
|
||||
JSON_URI="UNIFORMRESOURCEIDENTIFIER"
|
||||
JSON_HASHALG="HASHALGORITHM"
|
||||
JSON_HASHVALUE="HASHVALUE"
|
||||
#### JSON Properties Keywords
|
||||
JSON_NAME="NAME"
|
||||
JSON_VALUE="VALUE"
|
||||
NOT_SPECIFIED="Not Specified"
|
||||
CHASSIS_SERIAL_NUMBER="111111"
|
||||
BASEBOARD_SERIAL_NUMBER="222222"
|
||||
BIOS_SERIAL_NUMBER="333333"
|
||||
PARSE_CPU_DATA_SERIAL_NUMBER="111222"
|
||||
PARSE_RAM_DATA_SERIAL_NUMBER="222333"
|
||||
PARSE_NIC_DATA_SERIAL_NUMBER="333444"
|
||||
PARSE_HDD_DATA_SERIAL_NUMBER="444555"
|
||||
PARSE_GFX_DATA_SERIAL_NUMBER="555666"
|
||||
|
||||
### JSON Structure Format
|
||||
JSON_INTERMEDIATE_FILE_OBJECT='{
|
||||
%s
|
||||
}'
|
||||
JSON_PLATFORM_TEMPLATE='
|
||||
\"'"$JSON_PLATFORM"'\": {
|
||||
%s
|
||||
}'
|
||||
JSON_PROPERTIESURI_TEMPLATE='
|
||||
\"'"$JSON_PROPERTIESURI"'\": {
|
||||
%s
|
||||
}'
|
||||
JSON_COMPONENTSURI_TEMPLATE='
|
||||
\"'"$JSON_COMPONENTSURI"'\": {
|
||||
%s
|
||||
}'
|
||||
JSON_PROPERTY_ARRAY_TEMPLATE='
|
||||
\"'"$JSON_PROPERTIES"'\": [%s
|
||||
]'
|
||||
JSON_COMPONENT_ARRAY_TEMPLATE='
|
||||
\"'"$JSON_COMPONENTS"'\": [%s
|
||||
]'
|
||||
JSON_COMPONENT_TEMPLATE='
|
||||
{
|
||||
%s
|
||||
}'
|
||||
JSON_PROPERTY_TEMPLATE='
|
||||
{
|
||||
\"'"$JSON_NAME"'\": \"%s\",
|
||||
\"'"$JSON_VALUE"'\": \"%s\"
|
||||
}
|
||||
'
|
||||
JSON_ADDRESSES_TEMPLATE=' \"'"$JSON_ADDRESSES"'\": [%s]'
|
||||
JSON_ETHERNETMAC_TEMPLATE=' {
|
||||
\"'"$JSON_ETHERNETMAC"'\": \"%s\" } '
|
||||
JSON_WLANMAC_TEMPLATE=' {
|
||||
\"'"$JSON_WLANMAC"'\": \"%s\" } '
|
||||
JSON_BLUETOOTHMAC_TEMPLATE=' {
|
||||
\"'"$JSON_BLUETOOTHMAC"'\": \"%s\" } '
|
||||
JSON_COMPONENTCLASS_TEMPLATE=' \"'"$JSON_COMPONENTCLASS"'\": {
|
||||
\"'"$JSON_COMPONENTCLASSREGISTRY"'\": \"%s\",
|
||||
\"'"$JSON_COMPONENTCLASSVALUE"'\": \"%s\"
|
||||
}'
|
||||
JSON_ATTRIBUTECERTIDENTIFIER_TEMPLATE=' \"'"$JSON_ATTRIBUTECERTIDENTIFIER"'\": {
|
||||
\"'"$JSON_HASHALG"'\": \"%s\",
|
||||
\"'"$JSON_HASHVALUE"'\": \"%s\"
|
||||
},'
|
||||
JSON_GENERICCERTIDENTIFIER_TEMPLATE=' \"'"$JSON_GENERICCERTIDENTIFIER"'\": {
|
||||
\"'"$JSON_ISSUER"'\": \"%s\",
|
||||
\"'"$JSON_SERIAL"'\": \"%s\"
|
||||
},'
|
||||
JSON_COMPONENTPLATFORMCERT_TEMPLATE='
|
||||
\"'"$JSON_COMPONENTPLATFORMCERT"'\": {
|
||||
%s
|
||||
}'
|
||||
JSON_COMPONENTPLATFORMCERTURI_TEMPLATE='
|
||||
\"'"$JSON_COMPONENTPLATFORMCERTURI"'\": {
|
||||
%s
|
||||
}'
|
||||
JSON_STATUS_TEMPLATE='
|
||||
\"'"$JSON_STATUS"'\": {
|
||||
|
||||
}'
|
||||
|
||||
### JSON Constructor Aides
|
||||
jsonComponentClass () {
|
||||
printf "$JSON_COMPONENTCLASS_TEMPLATE" "${1}" "${2}"
|
||||
}
|
||||
jsonManufacturer () {
|
||||
manufacturer=$(printf '\"'"$JSON_MANUFACTURER"'\": \"%s\"' "${1}")
|
||||
#tmpManufacturerId=$(queryForPen "${1}")
|
||||
#if [ -n "$tmpManufacturerId" ] && [ "$tmpManufacturerId" != "$PEN_ROOT" ]; then
|
||||
# tmpManufacturerId=$(jsonManufacturerId "$tmpManufacturerId")
|
||||
# manufacturer="$manufacturer"",""$tmpManufacturerId"
|
||||
#fi
|
||||
printf "$manufacturer"
|
||||
}
|
||||
jsonModel () {
|
||||
printf '\"'"$JSON_MODEL"'\": \"%s\"' "${1}"
|
||||
}
|
||||
jsonSerial () {
|
||||
printf '\"'"$JSON_SERIAL"'\": \"%s\"' "${1}"
|
||||
}
|
||||
jsonRevision () {
|
||||
printf '\"'"$JSON_REVISION"'\": \"%s\"' "${1}"
|
||||
}
|
||||
jsonManufacturerId () {
|
||||
printf '\"'"$JSON_MANUFACTURERID"'\": \"%s\"' "${1}"
|
||||
}
|
||||
jsonFieldReplaceable () {
|
||||
printf '\"'"$JSON_FIELDREPLACEABLE"'\": \"%s\"' "${1}"
|
||||
}
|
||||
jsonEthernetMac () {
|
||||
printf "$JSON_ETHERNETMAC_TEMPLATE" "${1}"
|
||||
}
|
||||
jsonWlanMac () {
|
||||
printf "$JSON_WLANMAC_TEMPLATE" "${1}"
|
||||
}
|
||||
jsonBluetoothMac () {
|
||||
printf "$JSON_BLUETOOTHMAC_TEMPLATE" "${1}"
|
||||
}
|
||||
jsonPlatformModel () {
|
||||
printf '\"'"$JSON_PLATFORMMODEL"'\": \"%s\"' "${1}"
|
||||
}
|
||||
jsonPlatformManufacturerStr () {
|
||||
manufacturer=$(printf '\"'"$JSON_PLATFORMMANUFACTURERSTR"'\": \"%s\"' "${1}")
|
||||
#tmpManufacturerId=$(queryForPen "${1}")
|
||||
#if [ -n "$tmpManufacturerId" ] && [ "$tmpManufacturerId" != "$PEN_ROOT" ]; then
|
||||
# tmpManufacturerId=$(jsonPlatformManufacturerId "$tmpManufacturerId")
|
||||
# manufacturer="$manufacturer"",""$tmpManufacturerId"
|
||||
#fi
|
||||
printf "$manufacturer"
|
||||
}
|
||||
jsonPlatformVersion () {
|
||||
printf '\"'"$JSON_PLATFORMVERSION"'\": \"%s\"' "${1}"
|
||||
}
|
||||
jsonPlatformSerial () {
|
||||
printf '\"'"$JSON_PLATFORMSERIAL"'\": \"%s\"' "${1}"
|
||||
}
|
||||
jsonPlatformManufacturerId () {
|
||||
printf '\"'"$JSON_PLATFORMMANUFACTURERID"'\": \"%s\"' "${1}"
|
||||
}
|
||||
queryForPen () {
|
||||
pen=$(grep -B 1 "^[ \t]*""${1}""$" "$ENTERPRISE_NUMBERS_FILE" | sed -n '1p' | tr -d [:space:])
|
||||
printf "%s%s" "$PEN_ROOT" "$pen"
|
||||
}
|
||||
jsonProperty () {
|
||||
if [ -n "${1}" ] && [ -n "${2}" ]; then
|
||||
if [ -n "${3}" ]; then
|
||||
printf "$JSON_PROPERTY_TEMPLATE" "${1}" "${2}" "${3}"
|
||||
else
|
||||
printf "$JSON_PROPERTY_TEMPLATE" "${1}" "${2}"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
jsonUri () {
|
||||
printf '\"'"$JSON_URI"'\": \"%s\"' "${1}"
|
||||
}
|
||||
jsonHashAlg () {
|
||||
printf '\"'"$JSON_HASHALG"'\": \"%s\"' "${1}"
|
||||
}
|
||||
jsonHashValue () {
|
||||
printf '\"'"$JSON_HASHVALUE"'\": \"%s\"' "${1}"
|
||||
}
|
||||
toCSV () {
|
||||
old="$IFS"
|
||||
IFS=','
|
||||
value="$*"
|
||||
value=$(printf "$value" | tr -s , | sed -e '1s/^[,]*//' | sed -e '$s/[,]*$//')
|
||||
printf "$value"
|
||||
}
|
||||
jsonAddress () {
|
||||
printf "$JSON_ADDRESSES_TEMPLATE" "$(toCSV "$@")"
|
||||
}
|
||||
jsonComponent () {
|
||||
printf "$JSON_COMPONENT_TEMPLATE" "$(toCSV "$@")"
|
||||
}
|
||||
jsonComponentArray () {
|
||||
printf "$JSON_COMPONENT_ARRAY_TEMPLATE" "$(toCSV "$@")"
|
||||
}
|
||||
jsonPropertyArray () {
|
||||
if [ "$#" -ne 0 ]; then
|
||||
printf "$JSON_PROPERTY_ARRAY_TEMPLATE" "$(toCSV "$@")"
|
||||
fi
|
||||
}
|
||||
jsonPlatformObject () {
|
||||
printf "$JSON_PLATFORM_TEMPLATE" "$(toCSV "$@")"
|
||||
}
|
||||
jsonComponentsUri () {
|
||||
if [ -n "$COMPONENTS_URI" ]; then
|
||||
componentsUri=$(jsonUri "$COMPONENTS_URI")
|
||||
componentsUriDetails=""
|
||||
if [ -n "$PROPERTIES_URI_LOCAL_COPY_FOR_HASH" ]; then
|
||||
hashAlg="2.16.840.1.101.3.4.2.1" # SHA256, see https://tools.ietf.org/html/rfc5754 for other common hash algorithm IDs
|
||||
hashValue=$(sha256sum "$COMPONENTS_URI_LOCAL_COPY_FOR_HASH" | sed -r 's/^([0-9a-f]+).*/\1/' | tr -d [:space:] | xxd -r -p | base64 -w 0)
|
||||
hashAlgStr=$(jsonHashAlg "$hashAlg")
|
||||
hashValueStr=$(jsonHashValue "$hashValue")
|
||||
propertiesUriDetails="$hashAlgStr"",""$hashValueStr"
|
||||
fi
|
||||
printf "$JSON_COMPONENTSURI_TEMPLATE" "$(toCSV "$componentsUri" "$componentsUriDetails")"
|
||||
fi
|
||||
}
|
||||
jsonPropertiesUri () {
|
||||
if [ -n "$PROPERTIES_URI" ]; then
|
||||
propertiesUri=$(jsonUri "$PROPERTIES_URI")
|
||||
propertiesUriDetails=""
|
||||
if [ -n "$PROPERTIES_URI_LOCAL_COPY_FOR_HASH" ]; then
|
||||
hashAlg="2.16.840.1.101.3.4.2.1" # SHA256, see https://tools.ietf.org/html/rfc5754 for other common hash algorithm IDs
|
||||
hashValue=$(sha256sum "$PROPERTIES_URI_LOCAL_COPY_FOR_HASH" | sed -r 's/^([0-9a-f]+).*/\1/' | tr -d [:space:] | xxd -r -p | base64 -w 0)
|
||||
hashAlgStr=$(jsonHashAlg "$hashAlg")
|
||||
hashValueStr=$(jsonHashValue "$hashValue")
|
||||
propertiesUriDetails="$hashAlgStr"",""$hashValueStr"
|
||||
fi| sed 's/^[ \t]*//;s/[ \t]*$//'
|
||||
printf "$JSON_PROPERTIESURI_TEMPLATE" "$(toCSV "$propertiesUri" "$propertiesUriDetails")"
|
||||
fi
|
||||
}
|
||||
jsonIntermediateFile () {
|
||||
printf "$JSON_INTERMEDIATE_FILE_OBJECT" "$(toCSV "$@")"
|
||||
}
|
||||
standardizeMACAddr () {
|
||||
mac=$(printf "${1}" | tr -d "[[:space:]]:-" | awk '{ print toupper($0) }')
|
||||
printf "$mac"
|
||||
}
|
||||
|
||||
|
||||
|
||||
## Some of the commands below require root.
|
||||
if [ "$EUID" -ne 0 ]
|
||||
then echo "Please run as root"
|
||||
exit
|
||||
fi
|
||||
|
||||
### Gather platform details for the subject alternative name
|
||||
dmidecodeParseTypeAssumeOneHandle "$SMBIOS_TYPE_PLATFORM"
|
||||
platformManufacturer=$(dmidecodeGetString $(dmidecodeGetByte "0x4"))
|
||||
platformModel=$(dmidecodeGetString $(dmidecodeGetByte "0x5"))
|
||||
platformVersion=$(dmidecodeGetString $(dmidecodeGetByte "0x6"))
|
||||
platformSerial=$(dmidecodeGetString $(dmidecodeGetByte "0x7"))
|
||||
|
||||
if [[ -z "${platformManufacturer// }" ]]; then
|
||||
platformManufacturer="$NOT_SPECIFIED"
|
||||
fi
|
||||
platformManufacturer=$(echo "$platformManufacturer" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
platformManufacturer=$(jsonPlatformManufacturerStr "$platformManufacturer")
|
||||
|
||||
if [[ -z "${platformModel// }" ]]; then
|
||||
platformModel="$NOT_SPECIFIED"
|
||||
fi
|
||||
platformModel=$(echo "$platformModel" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
platformModel=$(jsonPlatformModel "$platformModel")
|
||||
|
||||
if [[ -z "${platformVersion// }" ]]; then
|
||||
platformVersion="$NOT_SPECIFIED"
|
||||
fi
|
||||
platformVersion=$(echo "$platformVersion" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
platformVersion=$(jsonPlatformVersion "$platformVersion")
|
||||
|
||||
if ! [[ -z "${platformSerial// }" ]]; then
|
||||
platformSerial=$(echo "$platformSerial" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
platformSerial=$(jsonPlatformSerial "$platformSerial")
|
||||
fi
|
||||
platform=$(jsonPlatformObject "$platformManufacturer" "$platformModel" "$platformVersion" "$platformSerial")
|
||||
|
||||
|
||||
|
||||
### Gather component details
|
||||
dmidecodeParseTypeAssumeOneHandle "$SMBIOS_TYPE_CHASSIS"
|
||||
chassisClass=$(jsonComponentClass "$COMPCLASS_REGISTRY_TCG" "$COMPCLASS_CHASSIS")
|
||||
chassisManufacturer=$(dmidecodeGetString $(dmidecodeGetByte "0x4"))
|
||||
chassisModel=$(dmidecodeGetByte "0x5")
|
||||
chassisModel=$(printf "%d" "0x""$chassisModel") # Convert to decimal
|
||||
chassisSerial=$(dmidecodeGetString $(dmidecodeGetByte "0x7"))
|
||||
chassisRevision=$(dmidecodeGetString $(dmidecodeGetByte "0x6"))
|
||||
|
||||
if [[ -z "${chassisManufacturer// }" ]]; then
|
||||
chassisManufacturer="$NOT_SPECIFIED"
|
||||
fi
|
||||
chassisManufacturer=$(echo "$chassisManufacturer" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
chassisManufacturer=$(jsonManufacturer "$chassisManufacturer")
|
||||
|
||||
if [[ -z "${chassisModel// }" ]]; then
|
||||
chassisModel="$NOT_SPECIFIED"
|
||||
fi
|
||||
chassisModel=$(echo "$chassisModel" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
chassisModel=$(jsonModel "$chassisModel")
|
||||
|
||||
chassisOptional=""
|
||||
if ! [[ -z "${chassisSerial// }" ]]; then
|
||||
chassisSerial=$(echo "$chassisSerial" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
chassisSerial=$(jsonSerial "$chassisSerial")
|
||||
chassisOptional="$chassisOptional"",""$chassisSerial"
|
||||
fi
|
||||
if ! [[ -z "${chassisRevision// }" ]]; then
|
||||
chassisRevision=$(echo "$chassisRevision" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
chassisRevision=$(jsonRevision "$chassisRevision")
|
||||
chassisOptional="$chassisOptional"",""$chassisRevision"
|
||||
fi
|
||||
chassisOptional=$(printf "$chassisOptional" | cut -c2-)
|
||||
# Use default SN#
|
||||
if [[ -z "${chassisOptional// }" ]]; then
|
||||
chassisSerial=$(jsonSerial "$CHASSIS_SERIAL_NUMBER")
|
||||
chassisOptional="$chassisOptional"",""$chassisSerial"
|
||||
fi
|
||||
|
||||
componentChassis=$(jsonComponent "$chassisClass" "$chassisManufacturer" "$chassisModel" "$chassisOptional")
|
||||
|
||||
### Gather baseboard details
|
||||
dmidecodeParseTypeAssumeOneHandle "$SMBIOS_TYPE_BASEBOARD"
|
||||
baseboardClass=$(jsonComponentClass "$COMPCLASS_REGISTRY_TCG" "$COMPCLASS_BASEBOARD")
|
||||
baseboardManufacturer=$(dmidecodeGetString $(dmidecodeGetByte "0x4"))
|
||||
baseboardModel=$(dmidecodeGetString $(dmidecodeGetByte "0x5"))
|
||||
baseboardSerial=$(dmidecodeGetString $(dmidecodeGetByte "0x7"))
|
||||
baseboardRevision=$(dmidecodeGetString $(dmidecodeGetByte "0x6"))
|
||||
baseboardFeatureFlags=$(dmidecodeGetByte "0x9")
|
||||
baseboardFeatureFlags=$(printf "%d" "0x""$baseboardFeatureFlags") # Convert to decimal
|
||||
baseboardReplaceableIndicator="28"
|
||||
baseboardFieldReplaceableAnswer="false"
|
||||
if (((baseboardFeatureFlags&baseboardReplaceableIndicator)!=0)); then
|
||||
baseboardFieldReplaceableAnswer="true"
|
||||
fi
|
||||
baseboardFieldReplaceable=$(jsonFieldReplaceable "$baseboardFieldReplaceableAnswer")
|
||||
|
||||
if [[ -z "${baseboardManufacturer// }" ]]; then
|
||||
baseboardManufacturer="$NOT_SPECIFIED"
|
||||
fi
|
||||
baseboardManufacturer=$(echo "$baseboardManufacturer" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
baseboardManufacturer=$(jsonManufacturer "$baseboardManufacturer")
|
||||
|
||||
if [[ -z "${baseboardModel// }" ]]; then
|
||||
baseboardModel="$NOT_SPECIFIED"
|
||||
fi
|
||||
baseboardModel=$(echo "$baseboardModel" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
baseboardModel=$(jsonModel "$baseboardModel")
|
||||
|
||||
baseboardOptional=""
|
||||
if ! [[ -z "${baseboardSerial// }" ]]; then
|
||||
baseboardSerial=$(echo "$baseboardSerial" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
baseboardSerial=$(jsonSerial "$baseboardSerial")
|
||||
baseboardOptional="$baseboardOptional"",""$baseboardSerial"
|
||||
fi
|
||||
if ! [[ -z "${baseboardRevision// }" ]]; then
|
||||
baseboardRevision=$(echo "$baseboardRevision" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
baseboardRevision=$(jsonRevision "$baseboardRevision")
|
||||
baseboardOptional="$baseboardOptional"",""$baseboardRevision"
|
||||
fi
|
||||
baseboardOptional=$(printf "$baseboardOptional" | cut -c2-)
|
||||
# Use default SN#
|
||||
if [[ -z "${baseboardOptional// }" ]]; then
|
||||
baseboardSerial=$(jsonSerial "$BASEBOARD_SERIAL_NUMBER")
|
||||
baseboardOptional="$baseboardOptional"",""$baseboardSerial"
|
||||
fi
|
||||
|
||||
componentBaseboard=$(jsonComponent "$baseboardClass" "$baseboardManufacturer" "$baseboardModel" "$baseboardFieldReplaceable" "$baseboardOptional")
|
||||
|
||||
### Gather BIOS details
|
||||
dmidecodeParseTypeAssumeOneHandle "$SMBIOS_TYPE_BIOS"
|
||||
biosClass=$(jsonComponentClass "$COMPCLASS_REGISTRY_TCG" "$COMPCLASS_BIOS")
|
||||
biosManufacturer=$(dmidecodeGetString $(dmidecodeGetByte "0x4"))
|
||||
biosModel=""
|
||||
biosSerial=""
|
||||
biosRevision=$(dmidecodeGetString $(dmidecodeGetByte "0x5"))
|
||||
|
||||
if [[ -z "${biosManufacturer// }" ]]; then
|
||||
biosManufacturer="$NOT_SPECIFIED"
|
||||
fi
|
||||
biosManufacturer=$(echo "$biosManufacturer" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
biosManufacturer=$(jsonManufacturer "$biosManufacturer")
|
||||
|
||||
if [[ -z "${biosModel// }" ]]; then
|
||||
biosModel="$NOT_SPECIFIED"
|
||||
fi
|
||||
biosModel=$(echo "$biosModel" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
biosModel=$(jsonModel "$biosModel")
|
||||
|
||||
biosOptional=""
|
||||
if ! [[ -z "${biosSerial// }" ]]; then
|
||||
biosSerial=$(echo "$biosSerial" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
biosSerial=$(jsonSerial "$biosSerial")
|
||||
biosOptional="$biosOptional"",""$biosSerial"
|
||||
fi
|
||||
if ! [[ -z "${biosRevision// }" ]]; then
|
||||
biosRevision=$(echo "$biosRevision" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
biosRevision=$(jsonRevision "$biosRevision")
|
||||
biosOptional="$biosOptional"",""$biosRevision"
|
||||
fi
|
||||
biosOptional=$(printf "$biosOptional" | cut -c2-)
|
||||
# Use default SN#
|
||||
if [[ -z "${biosOptional// }" ]]; then
|
||||
biosSerial=$(jsonSerial "$BIOS_SERIAL_NUMBER")
|
||||
biosOptional="$biosOptional"",""$biosSerial"
|
||||
fi
|
||||
|
||||
componentBios=$(jsonComponent "$biosClass" "$biosManufacturer" "$biosModel" "$biosOptional")
|
||||
|
||||
parseCpuData () {
|
||||
|
||||
dmidecodeHandles "$SMBIOS_TYPE_CPU"
|
||||
|
||||
notReplaceableIndicator="6"
|
||||
tmpData=""
|
||||
numHandles=$(dmidecodeNumHandles)
|
||||
class=$(jsonComponentClass "$COMPCLASS_REGISTRY_TCG" "$COMPCLASS_CPU")
|
||||
|
||||
for ((i = 0 ; i < numHandles ; i++ )); do
|
||||
dmidecodeParseHandle "${tableHandles[$i]}"
|
||||
|
||||
manufacturer=$(dmidecodeGetString $(dmidecodeGetByte "0x7"))
|
||||
model=$(dmidecodeGetByte "0x6")
|
||||
model=$(printf "%d" "0x""$model") # Convert to decimal
|
||||
serial=$(dmidecodeGetString $(dmidecodeGetByte "0x20"))
|
||||
revision=$(dmidecodeGetString $(dmidecodeGetByte "0x10"))
|
||||
processorUpgrade=$(dmidecodeGetByte "0x19")
|
||||
processorUpgrade=$(printf "%d" "0x""$processorUpgrade") # Convert to decimal
|
||||
|
||||
if [[ -z "${manufacturer// }" ]]; then
|
||||
manufacturer="$NOT_SPECIFIED"
|
||||
fi
|
||||
manufacturer=$(echo "$manufacturer" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
manufacturer=$(jsonManufacturer "$manufacturer")
|
||||
|
||||
if [[ -z "${model// }" ]]; then
|
||||
model="$NOT_SPECIFIED"
|
||||
fi
|
||||
model=$(echo "$model" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
model=$(jsonModel "$model")
|
||||
|
||||
optional=""
|
||||
if ! [[ -z "${serial// }" ]]; then
|
||||
serial=$(echo "$serial" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
serial=$(jsonSerial "$serial")
|
||||
optional="$optional"",""$serial"
|
||||
fi
|
||||
if ! [[ -z "${revision// }" ]]; then
|
||||
revision=$(echo "$revision" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
revision=$(jsonRevision "$revision")
|
||||
optional="$optional"",""$revision"
|
||||
fi
|
||||
optional=$(printf "$optional" | cut -c2-)
|
||||
# Use default SN#
|
||||
if [[ -z "${optional// }" ]]; then
|
||||
serial=$(jsonSerial "$PARSE_CPU_DATA_SERIAL_NUMBER")
|
||||
optional="$optional"",""$serial"
|
||||
PARSE_CPU_DATA_SERIAL_NUMBER=$((PARSE_CPU_DATA_SERIAL_NUMBER + 1))
|
||||
fi
|
||||
|
||||
replaceable="true"
|
||||
if [ $processorUpgrade -eq $notReplaceableIndicator ]; then
|
||||
replaceable="false"
|
||||
fi
|
||||
replaceable=$(jsonFieldReplaceable "$replaceable")
|
||||
|
||||
newCpuData=$(jsonComponent "$class" "$manufacturer" "$model" "$replaceable" "$optional")
|
||||
tmpData="$tmpData"",""$newCpuData"
|
||||
done
|
||||
|
||||
# remove leading comma
|
||||
tmpData=$(printf "$tmpData" | cut -c2-)
|
||||
|
||||
printf "$tmpData"
|
||||
}
|
||||
|
||||
parseRamData () {
|
||||
dmidecodeHandles "$SMBIOS_TYPE_RAM"
|
||||
|
||||
replaceable=$(jsonFieldReplaceable "true")
|
||||
tmpData=""
|
||||
numHandles=$(dmidecodeNumHandles)
|
||||
class=$(jsonComponentClass "$COMPCLASS_REGISTRY_TCG" "$COMPCLASS_RAM")
|
||||
|
||||
for ((i = 0 ; i < numHandles ; i++ )); do
|
||||
dmidecodeParseHandle "${tableHandles[$i]}"
|
||||
|
||||
manufacturer=$(dmidecodeGetString $(dmidecodeGetByte "0x17"))
|
||||
model=$(dmidecodeGetString $(dmidecodeGetByte "0x1A"))
|
||||
serial=$(dmidecodeGetString $(dmidecodeGetByte "0x18"))
|
||||
revision=$(dmidecodeGetString $(dmidecodeGetByte "0x19"))
|
||||
|
||||
if ([[ -z "${manufacturer// }" ]] && [[ -z "${model// }" ]] && [[ -z "${serial// }" ]] && [[ -z "${revision// }" ]]); then
|
||||
continue
|
||||
fi
|
||||
|
||||
if [[ -z "${manufacturer// }" ]]; then
|
||||
manufacturer="$NOT_SPECIFIED"
|
||||
fi
|
||||
manufacturer=$(echo "$manufacturer" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
manufacturer=$(jsonManufacturer "$manufacturer")
|
||||
|
||||
if [[ -z "${model// }" ]]; then
|
||||
model="$NOT_SPECIFIED"
|
||||
fi
|
||||
model=$(echo "$model" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
model=$(jsonModel "$model")
|
||||
|
||||
optional=""
|
||||
if ! [[ -z "${serial// }" ]]; then
|
||||
serial=$(echo "$serial" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
serial=$(jsonSerial "$serial")
|
||||
optional="$optional"",""$serial"
|
||||
fi
|
||||
if ! [[ -z "${revision// }" ]]; then
|
||||
revision=$(echo "$revision" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
revision=$(jsonRevision "$revision")
|
||||
optional="$optional"",""$revision"
|
||||
fi
|
||||
optional=$(printf "$optional" | cut -c2-)
|
||||
# Use default SN#
|
||||
if [[ -z "${optional// }" ]]; then
|
||||
serial=$(jsonSerial "$PARSE_RAM_DATA_SERIAL_NUMBER")
|
||||
optional="$optional"",""$serial"
|
||||
PARSE_RAM_DATA_SERIAL_NUMBER=$((PARSE_RAM_DATA_SERIAL_NUMBER + 1))
|
||||
fi
|
||||
|
||||
newRamData=$(jsonComponent "$class" "$manufacturer" "$model" "$replaceable" "$optional")
|
||||
tmpData="$tmpData"",""$newRamData"
|
||||
done
|
||||
|
||||
# remove leading comma
|
||||
tmpData=$(printf "$tmpData" | cut -c2-)
|
||||
|
||||
printf "$tmpData"
|
||||
}
|
||||
|
||||
# Write script to parse multiple responses
|
||||
# Network:
|
||||
# lshw description: type of address.
|
||||
# : Ethernet interface, Wireless interface, Bluetooth wireless interface
|
||||
# vendor: manufacturer
|
||||
# product: model
|
||||
# serial: address & serial number
|
||||
# version: revision
|
||||
#
|
||||
# Example:
|
||||
# ADDRESS1=$(jsonEthernetMac "AB:CD:EE:EE:DE:34")
|
||||
# ADDR_LIST=$(jsonAddress "$ADDRESS1" "$ADDRESS2")
|
||||
parseNicData () {
|
||||
lshwNetwork
|
||||
|
||||
replaceable=$(jsonFieldReplaceable "true")
|
||||
tmpData=""
|
||||
numHandles=$(lshwNumBusItems)
|
||||
class=$(jsonComponentClass "$COMPCLASS_REGISTRY_TCG" "$COMPCLASS_NIC")
|
||||
|
||||
for ((i = 0 ; i < numHandles ; i++ )); do
|
||||
manufacturer=$(lshwGetVendorIDFromBusItem "$i")
|
||||
model=$(lshwGetProductIDFromBusItem "$i")
|
||||
serialConstant=$(lshwGetLogicalNameFromBusItem "$i")
|
||||
serialConstant=$(ethtoolPermAddr "$serialConstant")
|
||||
serialConstant=$(standardizeMACAddr "${serialConstant}")
|
||||
serial=""
|
||||
revision=$(lshwGetVersionFromBusItem "$i")
|
||||
|
||||
if [[ -z "${manufacturer// }" ]] && [[ -z "${model// }" ]] && (! [[ -z "${serialConstant// }" ]] || ! [[ -z "${revision// }" ]]); then
|
||||
manufacturer=$(lshwGetVendorNameFromBusItem "$i")
|
||||
model=$(lshwGetProductNameFromBusItem "$i")
|
||||
fi
|
||||
|
||||
if [[ -z "${manufacturer// }" ]]; then
|
||||
manufacturer="$NOT_SPECIFIED"
|
||||
fi
|
||||
manufacturer=$(echo "$manufacturer" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
manufacturer=$(jsonManufacturer "$manufacturer")
|
||||
|
||||
if [[ -z "${model// }" ]]; then
|
||||
model="$NOT_SPECIFIED"
|
||||
fi
|
||||
model=$(echo "$model" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
model=$(jsonModel "$model")
|
||||
|
||||
optional=""
|
||||
if ! [[ -z "${serialConstant// }" ]]; then
|
||||
serial=$(echo "$serialConstant" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
serial=$(jsonSerial "$serialConstant")
|
||||
optional="$optional"",""$serial"
|
||||
fi
|
||||
if ! [[ -z "${revision// }" ]]; then
|
||||
revision=$(echo "$revision" | sed 's/^[ \t]*//;s/[ \t]*$//' | awk '{ print toupper($0) }')
|
||||
revision=$(jsonRevision "$revision")
|
||||
optional="$optional"",""$revision"
|
||||
fi
|
||||
bluetoothCap=$(lshwBusItemBluetoothCap "$i")
|
||||
ethernetCap=$(lshwBusItemEthernetCap "$i")
|
||||
wirelessCap=$(lshwBusItemWirelessCap "$i")
|
||||
|
||||
if ([ -n "$bluetoothCap" ] || [ -n "$ethernetCap" ] || [ -n "$wirelessCap" ]) && ! [[ -z "${serialConstant// }" ]]; then
|
||||
thisAddress=
|
||||
if [ -n "$wirelessCap" ]; then
|
||||
thisAddress=$(jsonWlanMac "$serialConstant")
|
||||
elif [ -n "$bluetoothCap" ]; then
|
||||
thisAddress=$(jsonBluetoothMac "$serialConstant")
|
||||
elif [ -n "$ethernetCap" ]; then
|
||||
thisAddress=$(jsonEthernetMac "$serialConstant")
|
||||
fi
|
||||
if [ -n "$thisAddress" ]; then
|
||||
thisAddress=$(jsonAddress "$thisAddress")
|
||||
optional="$optional"",""$thisAddress"
|
||||
fi
|
||||
fi
|
||||
optional=$(printf "$optional" | cut -c2-)
|
||||
# Use default SN#
|
||||
if [[ -z "${optional// }" ]]; then
|
||||
serial=$(jsonSerial "$PARSE_NIC_DATA_SERIAL_NUMBER")
|
||||
optional="$optional"",""$serial"
|
||||
PARSE_NIC_DATA_SERIAL_NUMBER=$((PARSE_NIC_DATA_SERIAL_NUMBER + 1))
|
||||
fi
|
||||
|
||||
newNicData=$(jsonComponent "$class" "$manufacturer" "$model" "$replaceable" "$optional")
|
||||
tmpData="$tmpData"",""$newNicData"
|
||||
done
|
||||
|
||||
# remove leading comma
|
||||
tmpData=$(printf "$tmpData" | cut -c2-)
|
||||
|
||||
printf "$tmpData"
|
||||
}
|
||||
|
||||
parseHddData () {
|
||||
lshwDisk
|
||||
|
||||
replaceable=$(jsonFieldReplaceable "true")
|
||||
tmpData=""
|
||||
numHandles=$(lshwNumBusItems)
|
||||
class=$(jsonComponentClass "$COMPCLASS_REGISTRY_TCG" "$COMPCLASS_HDD")
|
||||
|
||||
for ((i = 0 ; i < numHandles ; i++ )); do
|
||||
manufacturer=$(lshwGetVendorIDFromBusItem "$i")
|
||||
model=$(lshwGetProductIDFromBusItem "$i")
|
||||
serial=$(lshwGetSerialFromBusItem "$i")
|
||||
revision=$(lshwGetVersionFromBusItem "$i")
|
||||
|
||||
if [[ -z "${manufacturer// }" ]] && [[ -z "${model// }" ]] && (! [[ -z "${serial// }" ]] || ! [[ -z "${revision// }" ]]); then
|
||||
model=$(lshwGetProductNameFromBusItem "$i")
|
||||
manufacturer=""
|
||||
revision="" # Seeing inconsistent behavior cross-OS for this case, will return
|
||||
fi
|
||||
|
||||
if [[ -z "${manufacturer// }" ]]; then
|
||||
manufacturer="$NOT_SPECIFIED"
|
||||
fi
|
||||
manufacturer=$(echo "$manufacturer" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
manufacturer=$(jsonManufacturer "$manufacturer")
|
||||
|
||||
if [[ -z "${model// }" ]]; then
|
||||
model="$NOT_SPECIFIED"
|
||||
fi
|
||||
model=$(echo "$model" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
model=$(jsonModel "$model")
|
||||
|
||||
optional=""
|
||||
if ! [[ -z "${serial// }" ]]; then
|
||||
serial=$(echo "$serial" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
serial=$(jsonSerial "$serial")
|
||||
optional="$optional"",""$serial"
|
||||
fi
|
||||
if ! [[ -z "${revision// }" ]]; then
|
||||
revision=$(echo "$revision" | sed 's/^[ \t]*//;s/[ \t]*$//' | awk '{ print toupper($0) }')
|
||||
revision=$(jsonRevision "$revision")
|
||||
optional="$optional"",""$revision"
|
||||
fi
|
||||
optional=$(printf "$optional" | cut -c2-)
|
||||
# Use default SN#
|
||||
if [[ -z "${optional// }" ]]; then
|
||||
serial=$(jsonSerial "PARSE_HDD_DATA_SERIAL_NUMBER")
|
||||
optional="$optional"",""$serial"
|
||||
PARSE_HDD_DATA_SERIAL_NUMBER=$((PARSE_HDD_DATA_SERIAL_NUMBER + 1))
|
||||
fi
|
||||
|
||||
newHddData=$(jsonComponent "$class" "$manufacturer" "$model" "$replaceable" "$optional")
|
||||
tmpData="$tmpData"",""$newHddData"
|
||||
done
|
||||
|
||||
# remove leading comma
|
||||
tmpData=$(printf "$tmpData" | cut -c2-)
|
||||
|
||||
printf "$tmpData"
|
||||
}
|
||||
|
||||
parseNvmeData () {
|
||||
nvmeParse
|
||||
|
||||
replaceable=$(jsonFieldReplaceable "true")
|
||||
tmpData=""
|
||||
numHandles=$(nvmeNumDevices)
|
||||
class=$(jsonComponentClass "$COMPCLASS_REGISTRY_TCG" "$COMPCLASS_HDD")
|
||||
|
||||
for ((i = 0 ; i < numHandles ; i++ )); do
|
||||
manufacturer="" # Making this appear as it does on windows, lshw doesn't see nvme drives and nvme-cli doesn't return a manufacturer field
|
||||
model=$(nvmeGetModelNumberForDevice "$i")
|
||||
serial=$(nvmeGetNguidForDevice "$i")
|
||||
|
||||
if [[ $serial =~ ^[0]+$ ]]; then
|
||||
serial=$(nvmeGetEuiForDevice "$i")
|
||||
fi
|
||||
revision="" # empty for a similar reason to the manufacturer field
|
||||
|
||||
if [[ -z "${manufacturer// }" ]]; then
|
||||
manufacturer="$NOT_SPECIFIED"
|
||||
fi
|
||||
manufacturer=$(echo "$manufacturer" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
manufacturer=$(jsonManufacturer "$manufacturer")
|
||||
|
||||
if [[ -z "${model// }" ]]; then
|
||||
model="$NOT_SPECIFIED"
|
||||
fi
|
||||
model=$(echo "${model:0:16}" | sed 's/^[ \t]*//;s/[ \t]*$//') # limited to 16 characters for compatibility to windows, then trimmed
|
||||
model=$(jsonModel "$model")
|
||||
|
||||
optional=""
|
||||
if ! [[ -z "${serial// }" ]]; then
|
||||
serial=$(echo "${serial^^}" | sed 's/^[ \t]*//;s/[ \t]*$//' | sed 's/.\{4\}/&_/g' | sed 's/_$/\./')
|
||||
serial=$(jsonSerial "$serial")
|
||||
optional="$optional"",""$serial"
|
||||
fi
|
||||
optional=$(printf "$optional" | cut -c2-)
|
||||
|
||||
newHddData=$(jsonComponent "$class" "$manufacturer" "$model" "$replaceable" "$optional")
|
||||
tmpData="$tmpData"",""$newHddData"
|
||||
done
|
||||
|
||||
# remove leading comma
|
||||
tmpData=$(printf "$tmpData" | cut -c2-)
|
||||
|
||||
printf "$tmpData"
|
||||
}
|
||||
|
||||
parseGfxData () {
|
||||
lshwDisplay
|
||||
|
||||
replaceable=$(jsonFieldReplaceable "true")
|
||||
tmpData=""
|
||||
numHandles=$(lshwNumBusItems)
|
||||
class=$(jsonComponentClass "$COMPCLASS_REGISTRY_TCG" "$COMPCLASS_GFX")
|
||||
|
||||
for ((i = 0 ; i < numHandles ; i++ )); do
|
||||
manufacturer=$(lshwGetVendorIDFromBusItem "$i")
|
||||
model=$(lshwGetProductIDFromBusItem "$i")
|
||||
serial=$(lshwGetSerialFromBusItem "$i")
|
||||
revision=$(lshwGetVersionFromBusItem "$i")
|
||||
|
||||
if [[ -z "${manufacturer// }" ]] && [[ -z "${model// }" ]] && (! [[ -z "${serial// }" ]] || ! [[ -z "${revision// }" ]]); then
|
||||
manufacturer=$(lshwGetVendorNameFromBusItem "$i")
|
||||
model=$(lshwGetProductNameFromBusItem "$i")
|
||||
fi
|
||||
|
||||
if [[ -z "${manufacturer// }" ]]; then
|
||||
manufacturer="$NOT_SPECIFIED"
|
||||
fi
|
||||
manufacturer=$(echo "$manufacturer" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
manufacturer=$(jsonManufacturer "$manufacturer")
|
||||
|
||||
if [[ -z "${model// }" ]]; then
|
||||
model="$NOT_SPECIFIED"
|
||||
fi
|
||||
model=$(echo "$model" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
model=$(jsonModel "$model")
|
||||
|
||||
optional=""
|
||||
if ! [[ -z "${serial// }" ]]; then
|
||||
serial=$(echo "$serial" | sed 's/^[ \t]*//;s/[ \t]*$//')
|
||||
serial=$(jsonSerial "$serial")
|
||||
optional="$optional"",""$serial"
|
||||
fi
|
||||
if ! [[ -z "${revision// }" ]]; then
|
||||
revision=$(echo "$revision" | sed 's/^[ \t]*//;s/[ \t]*$//' | awk '{ print toupper($0) }')
|
||||
revision=$(jsonRevision "$revision")
|
||||
optional="$optional"",""$revision"
|
||||
fi
|
||||
optional=$(printf "$optional" | cut -c2-)
|
||||
# Use default SN#
|
||||
if [[ -z "${optional// }" ]]; then
|
||||
serial=$(jsonSerial "PARSE_GFX_DATA_SERIAL_NUMBER")
|
||||
optional="$optional"",""$serial"
|
||||
PARSE_GFX_DATA_SERIAL_NUMBER=$((PARSE_GFX_DATA_SERIAL_NUMBER + 1))
|
||||
fi
|
||||
|
||||
newGfxData=$(jsonComponent "$class" "$manufacturer" "$model" "$replaceable" "$optional")
|
||||
tmpData="$tmpData"",""$newGfxData"
|
||||
done
|
||||
|
||||
# remove leading comma
|
||||
tmpData=$(printf "$tmpData" | cut -c2-)
|
||||
|
||||
printf "$tmpData"
|
||||
}
|
||||
|
||||
|
||||
### Gather property details
|
||||
property1=$(jsonProperty "uname -r" "$(uname -r)") ## Example1
|
||||
property2=$(jsonProperty "OS Release" "$(grep 'PRETTY_NAME=' /etc/os-release | sed 's/[^=]*=//' | sed -e 's/^[[:space:]\"]*//' | sed -e 's/[[:space:]\"]*$//')") ## Example2
|
||||
|
||||
### Collate the component details
|
||||
componentsCPU=$(parseCpuData)
|
||||
componentsRAM=$(parseRamData)
|
||||
componentsNIC=$(parseNicData)
|
||||
componentsHDD=$(parseHddData)
|
||||
componentsGFX=$(parseGfxData)
|
||||
componentArray=$(jsonComponentArray "$componentChassis" "$componentBaseboard" "$componentBios" "$componentsCPU" "$componentsRAM" "$componentsNIC" "$componentsHDD" "$componentsNVMe" "$componentsGFX")
|
||||
|
||||
### Collate the property details
|
||||
propertyArray=$(jsonPropertyArray "$property1" "$property2")
|
||||
|
||||
### Construct the final JSON object
|
||||
FINAL_JSON_OBJECT=$(jsonIntermediateFile "$platform" "$componentArray" "$propertyArray")
|
||||
|
||||
### Collate the URI details, if parameters above are blank, the fields will be excluded from the final JSON structure
|
||||
if [ -n "$COMPONENTS_URI" ]; then
|
||||
componentsUri=$(jsonComponentsUri)
|
||||
FINAL_JSON_OBJECT="$FINAL_JSON_OBJECT"",""$componentsUri"
|
||||
fi
|
||||
if [ -n "$PROPERTIES_URI" ]; then
|
||||
propertiesUri=$(jsonPropertiesUri)
|
||||
FINAL_JSON_OBJECT="$FINAL_JSON_OBJECT"",""$propertiesUri"
|
||||
fi
|
||||
|
||||
printf "$FINAL_JSON_OBJECT""\n\n"
|
||||
|
||||
|
@ -1,64 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Script to run the System Tests Base/Delta(Bad) for HIRS TPM 2.0 Provisioner
|
||||
|
||||
set -e
|
||||
|
||||
echo ""
|
||||
echo "System Tests Base/Delta(Bad) TPM 2.0 Starting..."
|
||||
echo ""
|
||||
|
||||
# Start System Testing Docker Environment
|
||||
cd .ci/docker
|
||||
|
||||
docker-compose -f docker-compose-tpm2-base-delta-bad.yml up -d
|
||||
|
||||
tpm2_container_id="$(docker ps -aqf "name=hirs-aca-provisioner-tpm2")"
|
||||
echo "TPM 2.0 Container ID: $tpm2_container_id"
|
||||
|
||||
tpm2_container_status="$(docker inspect $tpm2_container_id --format='{{.State.Status}}')"
|
||||
echo "TPM 2.0 Container Status: $tpm2_container_status"
|
||||
|
||||
while [[ $tpm2_container_status == "running" ]]
|
||||
do
|
||||
sleep 20
|
||||
|
||||
# Add status message, so Travis will not time out.
|
||||
# It may timeout if it hasn't received output for more than 10 minutes.
|
||||
echo "Still running tests, please wait..."
|
||||
|
||||
tpm2_container_status="$(docker inspect $tpm2_container_id --format='{{.State.Status}}')"
|
||||
done
|
||||
|
||||
# Store container exit code
|
||||
tpm2_container_exit_code="$(docker inspect $tpm2_container_id --format='{{.State.ExitCode}}')"
|
||||
echo "TPM 2.0 Container Exit Code: $tpm2_container_exit_code"
|
||||
|
||||
# Display container log
|
||||
echo ""
|
||||
echo "===========hirs-aca-provisioner-tpm2 System Tests Log:==========="
|
||||
docker logs $tpm2_container_id
|
||||
|
||||
echo ""
|
||||
echo "End of System Tests Base/Delta(Bad) TPM 2.0 , cleaning up..."
|
||||
echo ""
|
||||
# Clean up services and network
|
||||
docker-compose down
|
||||
|
||||
# Clean up dangling containers
|
||||
echo "Cleaning up dangling containers..."
|
||||
echo ""
|
||||
docker ps -a
|
||||
echo ""
|
||||
docker container prune --force
|
||||
echo ""
|
||||
|
||||
# Return container exit code
|
||||
if [[ $tpm2_container_exit_code == 0 ]]
|
||||
then
|
||||
echo "SUCCESS: System Tests Base/Delta(Bad) TPM 2.0 passed"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "ERROR: System Tests Base/Delta(Bad) TPM 2.0 failed"
|
||||
exit 1
|
@ -1,64 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Script to run the System Tests Base/Delta(Good) for HIRS TPM 2.0 Provisioner
|
||||
|
||||
set -e
|
||||
|
||||
echo ""
|
||||
echo "System Tests Base/Delta(Good) TPM 2.0 Starting..."
|
||||
echo ""
|
||||
|
||||
# Start System Testing Docker Environment
|
||||
cd .ci/docker
|
||||
|
||||
docker-compose -f docker-compose-tpm2-base-delta-good.yml up -d
|
||||
|
||||
tpm2_container_id="$(docker ps -aqf "name=hirs-aca-provisioner-tpm2")"
|
||||
echo "TPM 2.0 Container ID: $tpm2_container_id"
|
||||
|
||||
tpm2_container_status="$(docker inspect $tpm2_container_id --format='{{.State.Status}}')"
|
||||
echo "TPM 2.0 Container Status: $tpm2_container_status"
|
||||
|
||||
while [[ $tpm2_container_status == "running" ]]
|
||||
do
|
||||
sleep 20
|
||||
|
||||
# Add status message, so Travis will not time out.
|
||||
# It may timeout if it hasn't received output for more than 10 minutes.
|
||||
echo "Still running tests, please wait..."
|
||||
|
||||
tpm2_container_status="$(docker inspect $tpm2_container_id --format='{{.State.Status}}')"
|
||||
done
|
||||
|
||||
# Store container exit code
|
||||
tpm2_container_exit_code="$(docker inspect $tpm2_container_id --format='{{.State.ExitCode}}')"
|
||||
echo "TPM 2.0 Container Exit Code: $tpm2_container_exit_code"
|
||||
|
||||
# Display container log
|
||||
echo ""
|
||||
echo "===========hirs-aca-provisioner-tpm2 System Tests Log:==========="
|
||||
docker logs $tpm2_container_id
|
||||
|
||||
echo ""
|
||||
echo "End of System Tests Base/Delta(Good) TPM 2.0, cleaning up..."
|
||||
echo ""
|
||||
# Clean up services and network
|
||||
docker-compose down
|
||||
|
||||
# Clean up dangling containers
|
||||
echo "Cleaning up dangling containers..."
|
||||
echo ""
|
||||
docker ps -a
|
||||
echo ""
|
||||
docker container prune --force
|
||||
echo ""
|
||||
|
||||
# Return container exit code
|
||||
if [[ $tpm2_container_exit_code == 0 ]]
|
||||
then
|
||||
echo "SUCCESS: System Tests Base/Delta(Good) TPM 2.0 passed"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "ERROR: System Tests Base/Delta(Good) TPM 2.0 failed"
|
||||
exit 1
|
@ -1,64 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Script to run the System Tests for HIRS TPM 2.0 Provisioner
|
||||
|
||||
set -e
|
||||
|
||||
echo ""
|
||||
echo "System Tests TPM 2.0 Starting..."
|
||||
echo ""
|
||||
|
||||
# Start System Testing Docker Environment
|
||||
cd .ci/docker
|
||||
|
||||
docker-compose -f docker-compose-tpm2.yml up -d
|
||||
|
||||
tpm2_container_id="$(docker ps -aqf "name=hirs-aca-provisioner-tpm2")"
|
||||
echo "TPM 2.0 Container ID: $tpm2_container_id"
|
||||
|
||||
tpm2_container_status="$(docker inspect $tpm2_container_id --format='{{.State.Status}}')"
|
||||
echo "TPM 2.0 Container Status: $tpm2_container_status"
|
||||
|
||||
while [[ $tpm2_container_status == "running" ]]
|
||||
do
|
||||
sleep 20
|
||||
|
||||
# Add status message, so Travis will not time out.
|
||||
# It may timeout if it hasn't received output for more than 10 minutes.
|
||||
echo "Still running tests, please wait..."
|
||||
|
||||
tpm2_container_status="$(docker inspect $tpm2_container_id --format='{{.State.Status}}')"
|
||||
done
|
||||
|
||||
# Store container exit code
|
||||
tpm2_container_exit_code="$(docker inspect $tpm2_container_id --format='{{.State.ExitCode}}')"
|
||||
echo "TPM 2.0 Container Exit Code: $tpm2_container_exit_code"
|
||||
|
||||
# Display container log
|
||||
echo ""
|
||||
echo "===========hirs-aca-provisioner-tpm2 System Tests Log:==========="
|
||||
docker logs $tpm2_container_id
|
||||
|
||||
echo ""
|
||||
echo "End of System Tests TPM 2.0, cleaning up..."
|
||||
echo ""
|
||||
# Clean up services and network
|
||||
docker-compose down
|
||||
|
||||
# Clean up dangling containers
|
||||
echo "Cleaning up dangling containers..."
|
||||
echo ""
|
||||
docker ps -a
|
||||
echo ""
|
||||
docker container prune --force
|
||||
echo ""
|
||||
|
||||
# Return container exit code
|
||||
if [[ $tpm2_container_exit_code == 0 ]]
|
||||
then
|
||||
echo "SUCCESS: System Tests TPM 2.0 passed"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "ERROR: System Tests TPM 2.0 failed"
|
||||
exit 1
|
92
.ci/system-tests/run-system-tests.sh
Executable file → Normal file
92
.ci/system-tests/run-system-tests.sh
Executable file → Normal file
@ -1,56 +1,80 @@
|
||||
#!/bin/bash
|
||||
#########################################################################################
|
||||
# Script to run the System Tests for HIRS TPM 2.0 Provisoner
|
||||
#
|
||||
#########################################################################################
|
||||
aca_container=hirs-aca1
|
||||
tpm2_container=hirs-provisioner1-tpm2
|
||||
testResult="passed";
|
||||
issuerCert=../setup/certs/ca.crt
|
||||
|
||||
# Script to run the System Tests for HIRS TPM 1.2 Provisioner
|
||||
# Source files for Docker Variables and helper scripts
|
||||
. ./.ci/docker/.env
|
||||
|
||||
set -e
|
||||
set -a
|
||||
|
||||
echo ""
|
||||
echo "System Tests TPM 1.2 Starting..."
|
||||
echo ""
|
||||
echo "******** Setting up for HIRS System Tests for TPM 2.0 ******** "
|
||||
|
||||
# Start System Testing Docker Environment
|
||||
cd .ci/docker
|
||||
|
||||
docker-compose up -d
|
||||
docker-compose -f docker-compose-system-test.yml up -d
|
||||
|
||||
tpm_container_id="$(docker ps -aqf "name=hirs-aca-provisioner")"
|
||||
echo "TPM 1.2 Container ID: $tpm_container_id"
|
||||
cd ../system-tests
|
||||
source sys_test_common.sh
|
||||
|
||||
tpm_container_status="$(docker inspect $tpm_container_id --format='{{.State.Status}}')"
|
||||
echo "TPM 1.2 Container Status: $tpm_container_status"
|
||||
aca_container_id="$(docker ps -aqf "name=$aca_container")"
|
||||
tpm2_container_id="$(docker ps -aqf "name=$tpm2_container")"
|
||||
|
||||
while [[ $tpm_container_status == "running" ]]
|
||||
do
|
||||
sleep 20
|
||||
echo "ACA Container ID is $aca_container_id and has a status of $(CheckContainerStatus $aca_container_id)";
|
||||
echo "TPM2 Provisioner Container ID is $tpm2_container_id and has a status of $(CheckContainerStatus $tpm2_container_id)";
|
||||
|
||||
# Add status message, so Travis will not time out.
|
||||
# It may timeout if it hasn't received output for more than 10 minutes.
|
||||
echo "Still running tests, please wait..."
|
||||
# Install HIRS provioner and setup tpm2 emulator
|
||||
docker exec $tpm2_container /HIRS/.ci/setup/setup-tpm2provisioner.sh
|
||||
|
||||
tpm_container_status="$(docker inspect $tpm_container_id --format='{{.State.Status}}')"
|
||||
done
|
||||
# ********* Execute system tests here, add tests as needed *************
|
||||
echo "******** Setup Complete Begin HIRS System Tests ******** "
|
||||
|
||||
# Store container exit codes
|
||||
tpm_container_exit_code="$(docker inspect $tpm_container_id --format='{{.State.ExitCode}}')"
|
||||
echo "TPM 1.2 Container Exit Code: $tpm_container_exit_code"
|
||||
source aca_policy_tests.sh
|
||||
|
||||
# Display container logs
|
||||
echo "******** HIRS System Tests Complete ******** "
|
||||
|
||||
# collecting ACA logs for archiving
|
||||
echo "Collecting ACA logs ....."
|
||||
docker exec $aca_container mkdir -p /HIRS/logs/aca/;
|
||||
docker exec $aca_container cp -a /var/log/tomcat/. /HIRS/logs/aca/;
|
||||
docker exec $aca_container chmod -R 777 /HIRS/logs/;
|
||||
echo "Collecting provisioner logs"
|
||||
docker exec $tpm2_container mkdir -p /HIRS/logs/provisioner/;
|
||||
docker exec $tpm2_container cp -a /var/log/hirs/provisioner/. /HIRS/logs/provisioner/;
|
||||
docker exec $tpm2_container chmod -R 777 /HIRS/logs/;
|
||||
|
||||
# Display container log
|
||||
echo ""
|
||||
echo "===========hirs-aca-provisioner System Tests Log:==========="
|
||||
docker logs $tpm_container_id
|
||||
echo "===========HIRS Tests and Log collection complete ==========="
|
||||
#docker logs $tpm2_container_id
|
||||
|
||||
echo ""
|
||||
echo "End of System Tests TPM 1.2, cleaning up..."
|
||||
echo "End of System Tests for TPM 2.0, cleaning up..."
|
||||
echo ""
|
||||
# Clean up services and network
|
||||
docker-compose down
|
||||
|
||||
# Return container exit codes
|
||||
if [[ $tpm_container_exit_code == 0 ]]
|
||||
then
|
||||
echo "SUCCESS: System Tests TPM 1.2 passed"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "ERROR: System Tests TPM 1.2 failed"
|
||||
exit 1
|
||||
# Clean up dangling containers
|
||||
echo "Cleaning up dangling containers..."
|
||||
echo ""
|
||||
docker ps -a
|
||||
echo ""
|
||||
docker container prune --force
|
||||
echo ""
|
||||
echo "New value of test status is ${TEST_STATUS}"
|
||||
# Return container exit code
|
||||
if [[ ${TEST_STATUS} == "0" ]]; then
|
||||
echo "SUCCESS: System Tests for TPM 2.0 passed"
|
||||
echo "TEST_STATUS=0" >> $GITHUB_ENV
|
||||
exit 0;
|
||||
else
|
||||
echo "FAILURE: System Tests for TPM 2.0 failed"
|
||||
echo "TEST_STATUS=1" >> $GITHUB_ENV
|
||||
exit 1
|
||||
fi
|
79
.ci/system-tests/sys_test_common.sh
Normal file
79
.ci/system-tests/sys_test_common.sh
Normal file
@ -0,0 +1,79 @@
|
||||
#!/bin/bash
|
||||
#########################################################################################
|
||||
# Common functions used for HIRS system tests
|
||||
#
|
||||
#########################################################################################
|
||||
|
||||
# CheckContainerStatus <container-id>
# Verifies that the given Docker container is in the "running" state. If it is
# not, prints the container's exit code plus `docker info` for CI debugging and
# aborts the calling script with status 1.
# NOTE(review): container_status / container_exit_code are not declared `local`,
# so they leak into the caller's environment — confirm nothing depends on that.
CheckContainerStatus() {
  container_id=$1
  container_status="$(docker inspect $container_id --format='{{.State.Status}}')"
  echo "Container Status: $container_status"

  if [ "$container_status" != "running" ]; then
    container_exit_code="$(docker inspect $container_id --format='{{.State.ExitCode}}')"
    echo "Container Exit Code: $container_exit_code"
    docker info
    exit 1;
  fi
}
|
||||
|
||||
#########################################################################################
# ACA SupplyChainPolicy presets.
#
# Each setter flips the validation flags on the SupplyChainPolicy table in the ACA's
# hirs_db to a named preset. All of them delegate to _setSupplyChainPolicy, whose six
# positional 0/1 arguments are, in order:
#   enableEcValidation, enablePcAttributeValidation, enablePcValidation,
#   enableUtcValidation, enableFirmwareValidation, enableExpiredCertificateValidation
#########################################################################################

# Internal helper: apply the six flag values to SupplyChainPolicy inside the ACA container.
_setSupplyChainPolicy() {
  docker exec $aca_container mysql -u root -D hirs_db -e"Update SupplyChainPolicy set enableEcValidation=$1, enablePcAttributeValidation=$2, enablePcValidation=$3, enableUtcValidation=$4, enableFirmwareValidation=$5, enableExpiredCertificateValidation=$6;"
}

# Disable every supply-chain validation check.
setPolicyNone() {
  _setSupplyChainPolicy 0 0 0 0 0 0
}

# Validate Endorsement Credentials only.
setPolicyEkOnly() {
  _setSupplyChainPolicy 1 0 0 0 0 0
}

# Validate EC + Platform Credential, but skip PC attribute validation.
setPolicyEkPc_noAttCheck() {
  _setSupplyChainPolicy 1 0 1 0 0 0
}

# Validate EC + Platform Credential including attribute validation.
setPolicyEkPc() {
  _setSupplyChainPolicy 1 1 1 0 0 0
}

# Validate EC + Platform Credential (with attributes) + firmware validation.
setPolicyEkPcFw() {
  _setSupplyChainPolicy 1 1 1 0 1 0
}
|
||||
|
||||
# uploadTrustedCerts
# Uploads the CA certificate referenced by $issuerCert to the ACA portal's
# trust-chain endpoint so later provisioning runs can chain-validate certs.
# -k: the portal serves a self-signed TLS certificate; -s: suppress progress output.
uploadTrustedCerts() {
  curl -k -s -F "file=@$issuerCert" https://${HIRS_ACA_PORTAL_IP}:8443/HIRS_AttestationCAPortal/portal/certificate-request/trust-chain/upload
}
|
||||
|
||||
# provision_tpm2 takes one parameter which is the expected result of the provision: "pass" or "fail"
# updates totalTests and failedTests counts
#   $1 - expected outcome of running tpm_aca_provision in the TPM2 provisioner container.
# Detects failure by grepping the provisioner output for the word "failed", then
# compares the actual outcome against the expectation; a mismatch bumps failedTests.
provision_tpm2() {
  expected_result=$1
  ((totalTests++))
  provisionOutput=$(docker exec $tpm2_container tpm_aca_provision);
  echo "==========="
  echo "$provisionOutput";
  echo "===========";
  # The provisioner prints "...failed" on error; use that as the pass/fail signal.
  if [[ $provisionOutput == *"failed"* ]]; then
     if [[ $expected_result == "pass" ]]; then
        ((failedTests++))
        echo "!!! Provisioning failed, but was expected to pass"
     else
        echo "Provisioning failed as expected."
     fi
  else   # provisioning succeeded
     if [[ $expected_result == "fail" ]]; then
        ((failedTests++))
        echo "!!! Provisioning passed, but was expected to fail"
     else
        echo "Provisioning passed as expected."
     fi
  fi
}
|
||||
|
||||
# clearAcaDb
# Resets the ACA database between tests: truncates every hirs_db table with
# foreign-key checks temporarily disabled, then re-enables them.
# NOTE(review): the SQL below is one quoted string containing literal newlines;
# MySQL treats them as whitespace, so the statement stays intact.
clearAcaDb() {
docker exec $aca_container mysql -u root -e "use hirs_db; set foreign_key_checks=0; truncate Alert;truncate AlertBaselineIds;truncate
AppraisalResult;truncate Certificate;truncate Certificate_Certificate;truncate CertificatesUsedToValidate;truncate
ComponentInfo;truncate Device;truncate DeviceInfoReport;truncate IMADeviceState;truncate IMAMeasurementRecord;truncate
ImaBlacklistRecord;truncate ImaIgnoreSetRecord;truncate IntegrityReport;truncate IntegrityReports_Reports_Join;truncate
RepoPackage_IMABaselineRecord;truncate Report;truncate ReportMapper;truncate ReportRequestState;truncate ReportSummary;truncate
State;truncate SupplyChainValidation;truncate SupplyChainValidationSummary;truncate ReferenceManifest;truncate
SupplyChainValidationSummary_SupplyChainValidation;truncate TPM2ProvisionerState;truncate TPMBaselineRecords;truncate
TPMDeviceState;truncate TPMReport;truncate TPMReport_pcrValueList; set foreign_key_checks=1;"
}
|
@ -1,982 +0,0 @@
|
||||
# system_test.py - implements a group of tests that run appraisals on a client and server.
|
||||
# TODO: test_02-test_12 will need to be implemented when the additional HIRS
|
||||
# projects are imported to the new GitHub repo. The test code is commented out for now.
|
||||
|
||||
from __future__ import print_function
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
import urllib3
|
||||
|
||||
from system_test_core import DEFAULT_IMA_POLICY, DEFAULT_TPM_POLICY, \
|
||||
HIRSPortal, AttestationCAPortal, collectors, \
|
||||
send_command, send_command_sha1sum, run_hirs_report, run_hirs_provisioner_tpm_1_2, \
|
||||
run_hirs_provisioner_tpm_2_0, parse_xml_with_stripped_namespaces, \
|
||||
get_all_nodes_recursively, touch_random_file_and_remove, get_random_pcr_hex_value, \
|
||||
get_current_timestamp, is_ubuntu_client, is_tpm_2_0, is_tpm_1_2, \
|
||||
make_simple_ima_baseline, make_baseline_from_xml, \
|
||||
make_simple_ima_blacklist_baseline, \
|
||||
make_simple_ima_blacklist_baseline_with_hash, \
|
||||
make_simple_ima_blacklist_baseline_with_file_and_hash, \
|
||||
make_simple_ima_blacklist_baseline_with_updated_file_and_hash
|
||||
|
||||
# --- Test configuration (values come from the CI environment) -----------------

# A TPM quote covers PCR registers 0-23.
NUMBER_OF_PCRS = 24

# Optional per-run suffix so parallel CI runs don't collide on shared names.
suffix = os.environ.get('RANDOM_SYS_TEST_ID')
if suffix != None:
    print("Configuring with suffix: %s" % suffix)
    suffix = "-" + suffix
else:
    suffix = ""

# Collectors (e.g. IMA, TPM) enabled for this run; gates individual tests below.
COLLECTOR_LIST = os.environ.get('ENABLED_COLLECTORS').split(',')
CLIENT = os.environ.get('CLIENT_HOSTNAME')
CLIENT_OS = os.environ.get('CLIENT_OS')
TPM_VERSION = os.environ.get('TPM_VERSION')
# NOTE(review): placeholder URL — the HIRS_Portal tests are still disabled (see
# the TODO in the file header); Portal calls against this URL would fail.
HIRS_SERVER_URL = "https://TBD/HIRS_Portal/"
# ACA portal base URL built from the docker-compose network's IP/port env vars.
HIRS_ATTESTATION_CA_PORTAL_URL = "https://" + \
    os.environ.get('HIRS_ACA_PORTAL_IP') +":" + \
    os.environ.get('HIRS_ACA_PORTAL_PORT') + \
    "/HIRS_AttestationCAPortal/"
TEST_LOG_FILE = os.environ.get('TEST_LOG')
LOG_LEVEL = os.environ.get('LOG_LEVEL')

# Certificate fixtures baked into the CI containers.
CA_CERT_LOCATION = "/HIRS/.ci/setup/certs/ca.crt"
EK_CA_CERT_LOCATION = "/HIRS/.ci/setup/certs/ek_cert.der"
# Platform base and delta certificates produced during provisioning.
PBaseCertA_LOCATION = "/var/hirs/pc_generation/PBaseCertA.der"
PBaseCertB_LOCATION = "/var/hirs/pc_generation/PBaseCertB.der"
SIDeltaCertA1_LOCATION = "/var/hirs/pc_generation/SIDeltaCertA1.der"
SIDeltaCertA2_LOCATION = "/var/hirs/pc_generation/SIDeltaCertA2.der"
SIDeltaCertA2_resolved_LOCATION = "/var/hirs/pc_generation/SIDeltaCertA2_resolved.der"
SIDeltaCertA3_LOCATION = "/var/hirs/pc_generation/SIDeltaCertA3.der"
VARDeltaCertA1_LOCATION = "/var/hirs/pc_generation/VARDeltaCertA1.der"
VARDeltaCertA2_LOCATION = "/var/hirs/pc_generation/VARDeltaCertA2.der"
VARDeltaCertA2_resolved_LOCATION = "/var/hirs/pc_generation/VARDeltaCertA2_resolved.der"
SIDeltaCertB1_LOCATION = "/var/hirs/pc_generation/SIDeltaCertB1.der"
VARDeltaCertB1_LOCATION = "/var/hirs/pc_generation/VARDeltaCertB1.der"

# Known SHA-1 digests used by the IMA blacklist tests.
USB_STORAGE_FILE_HASH = "e164c378ceb45a62642730be5eb3169a6bfc2d6d"
USB_STORAGE_FILE_HASH_2 = "e164c378ceb45a62642730be5eb3169a6bfc1234"
FORMAT = "%(asctime)-15s %(message)s"
provisioner_out = None

# NOTE(review): eval() on an environment variable is unsafe if LOG_LEVEL can be
# influenced externally; getattr(logging, LOG_LEVEL) would be safer — confirm
# before changing, since LOG_LEVEL's exact format is set by the CI harness.
logging.basicConfig(filename=TEST_LOG_FILE,level=eval(LOG_LEVEL), format=FORMAT)
logging.info("***************** Beginning of system_test.py *****************")
logging.info("The ACA Portal is: " + HIRS_ATTESTATION_CA_PORTAL_URL)

# Module-level portal clients shared by all test cases.
Portal = HIRSPortal(HIRS_SERVER_URL)
AcaPortal = AttestationCAPortal(HIRS_ATTESTATION_CA_PORTAL_URL)

# The portals use self-signed certs; silence urllib3's per-request TLS warnings.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
class SystemTest(unittest.TestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpClass(self):
|
||||
"""Set the class up"""
|
||||
|
||||
def setUp(self):
|
||||
"""Set the systems tests state up for testing"""
|
||||
AcaPortal.disable_supply_chain_validations()
|
||||
|
||||
def tearDown(self):
|
||||
"""Tears down the state for testing"""
|
||||
|
||||
def test_01_attestation_ca_portal_online(self):
|
||||
"""Test that the Attestation CA Portal is online and accessible by making a GET request.
|
||||
If not online, an exception will be raised since the response code is non-200"""
|
||||
logging.info("***************** Beginning of attestation ca portal online test *****************")
|
||||
AcaPortal.check_is_online()
|
||||
|
||||
@collectors(['IMA', 'TPM'], COLLECTOR_LIST)
|
||||
def test_02_empty_baselines(self):
|
||||
"""Test that appraisal succeeds with empty IMA and TPM baselines"""
|
||||
logging.info("***************** Beginning of empty baseline test *****************")
|
||||
# Portal.set_default_policies(ima_policy=DEFAULT_IMA_POLICY, tpm_policy=DEFAULT_TPM_POLICY)
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertTrue(result)
|
||||
# self.assertEqual(0, Portal.get_alert_count_from_latest_report())
|
||||
|
||||
@collectors(['IMA'], COLLECTOR_LIST)
|
||||
def test_03_small_ima_appraisal(self):
|
||||
"""Test that appraisal works with a small hard-coded IMA baseline
|
||||
|
||||
steps:
|
||||
- upload a small hard-coded required set (two records)
|
||||
- make a policy that points to that baseline as its required set
|
||||
- set the default device group to point to that policy
|
||||
- run a report from the client machine using vagrant ssh
|
||||
"""
|
||||
logging.info("***************** Beginning of small IMA appraisal test *****************")
|
||||
# baseline = make_simple_ima_baseline()
|
||||
# policy_name = Portal.add_ima_policy(required_set=baseline, policy_name_prefix='small_ima')
|
||||
# Portal.set_default_policies(ima_policy=policy_name)
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertTrue(result)
|
||||
|
||||
@collectors(['IMA'], COLLECTOR_LIST)
|
||||
def test_04_large_ima_appraisal(self):
|
||||
"""Test that appraisal works with a full-size IMA baseline
|
||||
|
||||
steps:
|
||||
- generate an XML report or use a cached one
|
||||
- convert the IMA part of the report into a csv baseline
|
||||
- upload the csv file as an IMA baseline
|
||||
- make a policy that points to that baseline as its required set
|
||||
- set the default device group to point to that policy
|
||||
- run a report from the client machine using vagrant ssh
|
||||
"""
|
||||
logging.info("***************** Beginning of large IMA appraisal test *****************")
|
||||
# empty_ima_policy = Portal.add_ima_policy(required_set=None, policy_name_prefix="empty")
|
||||
# Portal.set_default_policies(ima_policy=empty_ima_policy,
|
||||
# tpm_policy=DEFAULT_TPM_POLICY)
|
||||
# run_hirs_report(CLIENT)
|
||||
# xml_report = Portal.get_latest_report()
|
||||
# baseline = make_baseline_from_xml(xml_report, "IMA")
|
||||
# policy_name = Portal.add_ima_policy(required_set=baseline, unknown_fail="true", policy_name_prefix="large_ima")
|
||||
# Portal.set_default_policies(ima_policy=policy_name)
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# after_alerts = Portal.get_alerts_from_latest_report()
|
||||
# new_alert_count = after_alerts['recordsTotal']
|
||||
# logging.info("{0} new alerts generated by latest report".format(new_alert_count))
|
||||
# if new_alert_count > 0:
|
||||
# logging.warning("new alert count: " + str(new_alert_count))
|
||||
# #logging.debug("new alerts:\n{0}".format(pprint.pformat(after_alerts['data'][0:new_alert_count])))
|
||||
# self.assertTrue(True)
|
||||
|
||||
@collectors(['IMA'], COLLECTOR_LIST)
|
||||
def test_05_small_ima_appraisal_required_set_missing(self):
|
||||
"""Test that appraisal results in an appropriate alert generation when a required set file is missing
|
||||
|
||||
steps:
|
||||
- upload a small hard-coded required set (two records)
|
||||
- add a fictitious file to the baseline
|
||||
- make a policy that points to that baseline as its required set
|
||||
- set the default device group to point to that policy
|
||||
- run a report from the client machine using vagrant ssh
|
||||
- make sure it failed and that one appropriate alert was thrown
|
||||
"""
|
||||
logging.info("***************** Beginning of small IMA appraisal test with required set missing *****************")
|
||||
# baseline = make_simple_ima_baseline()
|
||||
# baseline["name"] = "ima_baseline_missing_required_record_{0}".format(get_current_timestamp())
|
||||
# random_hash = str(hashlib.sha1(str(random.random())).hexdigest())
|
||||
# missing_file = "/required_directory/required_file"
|
||||
# baseline["records"].append({"path": missing_file, "hash": random_hash})
|
||||
# policy_name = Portal.add_ima_policy(required_set=baseline, policy_name_prefix="small_ima_req")
|
||||
# Portal.set_default_policies(ima_policy=policy_name)
|
||||
#
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertFalse(result)
|
||||
# after_alerts = Portal.get_alerts_from_latest_report()
|
||||
# new_alert_count = after_alerts['recordsTotal']
|
||||
# self.assertEqual(new_alert_count, 1)
|
||||
#
|
||||
# # find the alert with the most recent createTime
|
||||
# latest_alert = max(after_alerts['data'], key=lambda alert: alert['createTime'])
|
||||
# self.assertTrue("MISSING_RECORD" in latest_alert['type'])
|
||||
# self.assertTrue(random_hash in latest_alert['expected'])
|
||||
# self.assertTrue(missing_file in latest_alert['expected'])
|
||||
|
||||
@collectors(['IMA'], COLLECTOR_LIST)
|
||||
def test_06_tpm_white_list_appraisal(self):
|
||||
"""Test that appraisal works with a TPM white list baseline
|
||||
|
||||
steps:
|
||||
- run hirs report to generate an XML report for baseline creation
|
||||
- download the latest report in XML format
|
||||
- convert the TPM part of the report into a json baseline
|
||||
- make a policy that points to that json TPM white list baseline
|
||||
- set the default device group to point to that policy
|
||||
- run a report from the client machine
|
||||
"""
|
||||
logging.info("***************** Beginning of TPM white list appraisal test *****************")
|
||||
# empty_ima_policy = Portal.add_ima_policy(required_set=None)
|
||||
# Portal.set_default_policies(ima_policy=empty_ima_policy,
|
||||
# tpm_policy=DEFAULT_TPM_POLICY)
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertTrue(result)
|
||||
# xml_report = Portal.get_latest_report()
|
||||
# baseline = make_baseline_from_xml(xml_report, "TPM")
|
||||
# policy_name = Portal.add_tpm_wl_policy(baseline, policy_name_prefix="good")
|
||||
# Portal.set_default_policies(tpm_policy=policy_name)
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertTrue(result)
|
||||
# self.assertEqual(0, Portal.get_alert_count_from_latest_report())
|
||||
#
|
||||
# # create a new baseline with random PCR values
|
||||
# baseline_bad_tpm_pcr = make_baseline_from_xml(xml_report, "TPM")
|
||||
# for pcr_index in range(0, NUMBER_OF_PCRS):
|
||||
# baseline_bad_tpm_pcr["records"][pcr_index]["hash"] = get_random_pcr_hex_value()
|
||||
#
|
||||
# policy_name = Portal.add_tpm_wl_policy(baseline_bad_tpm_pcr, policy_name_prefix='bad_vals')
|
||||
# Portal.set_default_policies(tpm_policy=policy_name)
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertFalse(result)
|
||||
# self.assertEqual(NUMBER_OF_PCRS, Portal.get_alert_count_from_latest_report())
|
||||
#
|
||||
# after_alerts = Portal.get_alerts()
|
||||
#
|
||||
# # for the set of new alerts, verify the alert fields for each PCR value
|
||||
# # the order of the alerts it not necessarily PCR 0, 1, 2... , so we must index
|
||||
# # in to the hash table correctly
|
||||
# for alert_index in range(0, NUMBER_OF_PCRS):
|
||||
# pcr_alert = after_alerts["data"][alert_index]
|
||||
# alert_details = pcr_alert["details"]
|
||||
# pcr_int = int(re.findall(r'\d+', alert_details)[0])
|
||||
#
|
||||
# logging.info("Checking TPM alert for PCR %s", pcr_int)
|
||||
#
|
||||
# self.assertTrue("WHITE_LIST_PCR_MISMATCH" in pcr_alert['type'])
|
||||
# self.assertTrue("TPM_APPRAISER" in pcr_alert['source'])
|
||||
# baseline_hash = baseline_bad_tpm_pcr["records"][pcr_int]["hash"]
|
||||
# reported_hash = baseline["records"][pcr_int]["hash"]
|
||||
#
|
||||
# self.assertTrue(baseline_hash in pcr_alert['expected'])
|
||||
# self.assertTrue(reported_hash in pcr_alert['received'])
|
||||
|
||||
@collectors(['IMA'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_07_ima_blacklist_appraisal(self):
|
||||
"""Test that appraisal works with a small IMA blacklist baseline
|
||||
|
||||
steps:
|
||||
- upload a policy with a small hard-coded blacklist baseline
|
||||
- set the default device group to point to that policy
|
||||
- run a report from the client machine and ensure the appraisal passes
|
||||
- touch a file on the client that is contained in the blacklist
|
||||
- run another report from the client machine and ensure the appraisal fails
|
||||
"""
|
||||
logging.info("***************** Beginning of blacklist IMA appraisal test *****************")
|
||||
# baseline = make_simple_ima_blacklist_baseline()
|
||||
# policy_name = Portal.add_ima_policy(blacklist=baseline, policy_name_prefix='small_ima_blacklist')
|
||||
# Portal.set_default_policies(ima_policy=policy_name)
|
||||
#
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertTrue(result)
|
||||
#
|
||||
# send_command('touch /boot/usb-storage-foo.ko')
|
||||
# #send_command('sudo cat /tmp/usb-storage-foo.ko')
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertFalse(result)
|
||||
#
|
||||
# after_alerts = Portal.get_alerts_from_latest_report()
|
||||
# new_alert_count = after_alerts['recordsTotal']
|
||||
# self.assertEqual(new_alert_count, 1)
|
||||
#
|
||||
# # find the alert with the most recent createTime
|
||||
# latest_alert = after_alerts['data'][0]
|
||||
# self.assertTrue("IMA_BLACKLIST_PATH_MATCH" in latest_alert['type'])
|
||||
# self.assertTrue("usb-storage-foo.ko" in latest_alert['expected'])
|
||||
#
|
||||
# #
|
||||
# # create ima blacklist baseline that contains a hash and generate alert upon detection
|
||||
# #
|
||||
#
|
||||
# # create file and add content to file
|
||||
# send_command('touch /tmp/usb-storage_2.ko')
|
||||
# send_command('echo blacklist >> /tmp/usb-storage_2.ko')
|
||||
# policy_name = Portal.add_ima_policy(blacklist=None,
|
||||
# policy_name_prefix='empty')
|
||||
# Portal.set_default_policies(ima_policy=policy_name)
|
||||
#
|
||||
# # send report to verify successful appraisal
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertTrue(result)
|
||||
#
|
||||
# # create blacklist baseline with hash and update policy
|
||||
# baseline = make_simple_ima_blacklist_baseline_with_hash();
|
||||
# policy_name = Portal.add_ima_policy(blacklist=baseline,
|
||||
# policy_name_prefix='small_ima_blacklist_with_hash')
|
||||
# Portal.set_default_policies(ima_policy=policy_name)
|
||||
#
|
||||
# # trigger measurement of file and run hirs report
|
||||
# send_command('sudo cat /tmp/usb-storage_2.ko')
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertFalse(result)
|
||||
#
|
||||
# after_alerts = Portal.get_alerts_from_latest_report()
|
||||
# new_alert_count = after_alerts['recordsTotal']
|
||||
# self.assertEqual(new_alert_count, 1)
|
||||
#
|
||||
# # find the alert with the most recent createTime
|
||||
# latest_alert = after_alerts['data'][0]
|
||||
# self.assertTrue("IMA_BLACKLIST_HASH_MATCH" in latest_alert['type'])
|
||||
# self.assertTrue(USB_STORAGE_FILE_HASH in latest_alert['expected'])
|
||||
#
|
||||
# #
|
||||
# # create ima blacklist baseline that contains a file and hash and generate alert upon detection
|
||||
# #
|
||||
# policy_name = Portal.add_ima_policy(blacklist=None,
|
||||
# policy_name_prefix='empty')
|
||||
# Portal.set_default_policies(ima_policy=policy_name)
|
||||
#
|
||||
# # send report to verify successful appraisal
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertTrue(result)
|
||||
#
|
||||
# # create blacklist baseline with file and hash and update policy
|
||||
# baseline = make_simple_ima_blacklist_baseline_with_file_and_hash();
|
||||
# policy_name = Portal.add_ima_policy(blacklist=baseline,
|
||||
# policy_name_prefix='small_ima_blacklist_with_file_and_hash')
|
||||
# Portal.set_default_policies(ima_policy=policy_name)
|
||||
#
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertFalse(result)
|
||||
#
|
||||
# after_alerts = Portal.get_alerts_from_latest_report()
|
||||
# new_alert_count = after_alerts['recordsTotal']
|
||||
# self.assertEqual(new_alert_count, 1)
|
||||
#
|
||||
# # find the alert with the most recent createTime
|
||||
# latest_alert = after_alerts['data'][0]
|
||||
# self.assertTrue("IMA_BLACKLIST_PATH_AND_HASH_MATCH" in latest_alert['type'])
|
||||
# self.assertTrue("usb-storage_2.ko" in latest_alert['expected'])
|
||||
# self.assertTrue(USB_STORAGE_FILE_HASH in latest_alert['expected'])
|
||||
#
|
||||
# #
|
||||
# # change ima blacklist baseline file and hash and verify alert is not generated
|
||||
# #
|
||||
#
|
||||
# # create blacklist baseline with file and hash and update policy
|
||||
# baseline = make_simple_ima_blacklist_baseline_with_updated_file_and_hash();
|
||||
# policy_name = Portal.add_ima_policy(blacklist=baseline,
|
||||
# policy_name_prefix='small_ima_blacklist_with_updated_file_and_hash')
|
||||
# Portal.set_default_policies(ima_policy=policy_name)
|
||||
#
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertTrue(result)
|
||||
|
||||
@collectors(['IMA'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_08_delta_reports_required_set(self):
|
||||
"""Test that appraisal works with delta reports and required sets.
|
||||
|
||||
steps:
|
||||
- Run hirs report with an empty required set and delta reports
|
||||
enabled
|
||||
- Check first report for success and to make sure the test files
|
||||
are not there
|
||||
- Add the two test files (foo-file and foo-bar-file) to the required
|
||||
set with a hashes that indicates the files are empty
|
||||
- create foo-file and read it as root so it is measured by IMA
|
||||
- Run second hirs report
|
||||
- Check for failed appraisal (foo-bar-file hasn't been created yet)
|
||||
- Check that the report includes foo-file, but not foo-bar-file
|
||||
- Create foo-bar-file and read it as root
|
||||
- Run third hirs report
|
||||
- Check for failed appraisal (foo-file was in the previous report,
|
||||
so it won't be included in this one.
|
||||
- Check that foo-bar-file is in this report, but not foo-file
|
||||
"""
|
||||
|
||||
logging.info("***************** Beginning of Delta Reports required set appraisal test *****************")
|
||||
# unique_name = uuid.uuid4().hex
|
||||
# baseline_name = 'delta-reports-required-baseline-' + unique_name
|
||||
# foo_file_name = 'foo-file-' + unique_name
|
||||
# foo_bar_file_name = 'foo-bar-file-' + unique_name
|
||||
# test_hash = 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3'
|
||||
#
|
||||
# baseline = {"name": baseline_name,
|
||||
# "description": "a simple hard-coded ima baseline "
|
||||
# "for delta reports systems testing",
|
||||
# "records": []}
|
||||
#
|
||||
# ima_policy = Portal.add_ima_policy(required_set=baseline, delta_reports_enabled="true", policy_name_prefix="delta_with_required_set")
|
||||
# Portal.set_default_policies(ima_policy=ima_policy)
|
||||
# run_hirs_report(CLIENT)
|
||||
# report = Portal.get_latest_report()
|
||||
# found_foo_file = foo_file_name in report
|
||||
# found_foo_bar_file = foo_bar_file_name in report
|
||||
# self.assertFalse(found_foo_file)
|
||||
# self.assertFalse(found_foo_bar_file)
|
||||
#
|
||||
# Portal.add_to_ima_baseline(baseline_name, foo_file_name, test_hash)
|
||||
# Portal.add_to_ima_baseline(baseline_name, foo_bar_file_name, test_hash)
|
||||
#
|
||||
# #create foo_file_name. Don't create foo_bar_file_name yet.
|
||||
# #send_vagrant_command('echo {0} > {1}'.format("test", foo_file_name), CLIENT)
|
||||
# #send_vagrant_command('sudo cat {0}'.format(foo_file_name), CLIENT)
|
||||
# send_command('echo {0} > {1}'.format("test", foo_file_name))
|
||||
# send_command('sudo cat {0}'.format(foo_file_name))
|
||||
#
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertFalse(result, msg="report should fail - " + foo_bar_file_name + " not present")
|
||||
# report = Portal.get_latest_report()
|
||||
# found_foo_file = foo_file_name in report
|
||||
# found_foo_bar_file = foo_bar_file_name in report
|
||||
# self.assertTrue(found_foo_file)
|
||||
# self.assertFalse(found_foo_bar_file)
|
||||
#
|
||||
# send_vagrant_command('echo {0} > {1}'.format("test", foo_bar_file_name), CLIENT)
|
||||
# send_vagrant_command('sudo cat {0}'.format(foo_bar_file_name), CLIENT)
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertFalse(result, msg="delta reporting should fail becuase foo_file was in an earlier report")
|
||||
# report = Portal.get_latest_report()
|
||||
# found_foo_file = foo_file_name in report
|
||||
# found_foo_bar_file = foo_bar_file_name in report
|
||||
# self.assertFalse(found_foo_file)
|
||||
# self.assertTrue(found_foo_bar_file)
|
||||
#
|
||||
# send_vagrant_command('rm {0}'.format(foo_file_name), CLIENT)
|
||||
# send_vagrant_command('rm {0}'.format(foo_bar_file_name), CLIENT)
|
||||
|
||||
@collectors(['IMA'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_09_delta_reports_whitelist(self):
|
||||
"""Test that appraisal works with delta reports. Each report should be
|
||||
appraised individually. Checks that a failed appraisal can be followed
|
||||
by a successful appraisal if there are no errors in the second delta
|
||||
report.
|
||||
|
||||
steps:
|
||||
- Run hirs report with an empty required set and delta reports
|
||||
enabled
|
||||
- Check first report for success and to make sure the test files
|
||||
are not there
|
||||
- Add a test file (foo-file) to the whitelist with a hash that
|
||||
indicates the file is empty
|
||||
- Create foo-file with contents and read it as root so it is
|
||||
measured by IMA
|
||||
- Run second hirs report
|
||||
- Check for failed appraisal (foo-file should be a whitelist
|
||||
mismatch because the file isn't empty)
|
||||
- Check that the report includes foo-file
|
||||
- Run third hirs report
|
||||
- Check for successful appraisal (the mismatch was in the previous
|
||||
report so it won't be included in this one.
|
||||
- Check that foo-file is not in this report
|
||||
"""
|
||||
|
||||
logging.info("***************** Beginning of Delta Reports whitelist appraisal test *****************")
|
||||
# unique_name = uuid.uuid4().hex
|
||||
# baseline_name = 'delta-reports-whitelist-baseline-' + unique_name
|
||||
# foo_file_name = 'foo-file-' + unique_name
|
||||
# foo_bar_file_name = 'foo-bar-file-' + unique_name
|
||||
# test_hash = 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3'
|
||||
#
|
||||
# baseline = {"name": baseline_name,
|
||||
# "description": "a simple hard-coded ima baseline "
|
||||
# "for delta reports systems testing",
|
||||
# "records": []}
|
||||
#
|
||||
# ima_policy = Portal.add_ima_policy(whitelist=baseline, delta_reports_enabled="true", policy_name_prefix="delta_with_whitelist")
|
||||
# Portal.set_default_policies(ima_policy=ima_policy)
|
||||
# run_hirs_report(CLIENT)
|
||||
# report = Portal.get_latest_report()
|
||||
# found_foo_file = foo_file_name in report
|
||||
# self.assertFalse(found_foo_file)
|
||||
#
|
||||
# Portal.add_to_ima_baseline(baseline_name, foo_file_name, test_hash)
|
||||
#
|
||||
# #create foo_file_name. Don't create foo_bar_file_name yet.
|
||||
# send_vagrant_command('echo \'foo-file\' > {0}'.format(foo_file_name), CLIENT)
|
||||
# send_vagrant_command('sudo cat {0}'.format(foo_file_name), CLIENT)
|
||||
#
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertFalse(result, msg="report should fail - whitelist mismatch for " + foo_bar_file_name)
|
||||
# report = Portal.get_latest_report()
|
||||
# found_foo_file = foo_file_name in report
|
||||
# self.assertTrue(found_foo_file)
|
||||
#
|
||||
# result = run_hirs_report(CLIENT)
|
||||
# self.assertTrue(result, msg="delta reporting should pass because the mismatched record should be found in a previous report")
|
||||
# report = Portal.get_latest_report()
|
||||
# found_foo_file = foo_file_name in report
|
||||
# self.assertFalse(found_foo_file)
|
||||
#
|
||||
# send_vagrant_command('rm {0}'.format(foo_file_name), CLIENT)
|
||||
|
||||
@collectors(['IMA', 'TPM'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_10_on_demand(self):
|
||||
"""Test that on-demand (server-initiated) appraisal works.
|
||||
|
||||
steps:
|
||||
- push a simple ima baseline
|
||||
- set the policy
|
||||
- touch a random file, take the hash, then remove it
|
||||
- kick off an on-demand report on the server for the default device group
|
||||
- sleep to let the appraisal finish
|
||||
- pull the generated report
|
||||
- check that it passed appraisal
|
||||
- check that it has the random filename and hash
|
||||
- check that it contains a TPM Report
|
||||
"""
|
||||
logging.info("***************** Beginning of on-demand test *****************")
|
||||
# baseline = make_simple_ima_baseline()
|
||||
# policy_name = Portal.add_ima_policy(required_set=baseline, delta_reports_enabled="false", policy_name_prefix='on_demand')
|
||||
# logging.info('on demand policy name: %s', policy_name)
|
||||
# Portal.set_default_policies(ima_policy=policy_name, tpm_policy=DEFAULT_TPM_POLICY)
|
||||
# first_report_summary = Portal.get_latest_report_summary()
|
||||
#
|
||||
# (filename, sha_hash) = touch_random_file_and_remove(CLIENT)
|
||||
# partial_filename = filename.split('/')[-1]
|
||||
# logging.info("touched file {} with hash {}".format(filename, sha_hash))
|
||||
# Portal.start_on_demand()
|
||||
# logging.info("started on-demand appraisal")
|
||||
#
|
||||
# latest_report_summary = None
|
||||
#
|
||||
# attempts = 0
|
||||
# while latest_report_summary == None or latest_report_summary['report']['id'] == first_report_summary['report']['id']:
|
||||
# attempts += 1
|
||||
# time.sleep(20)
|
||||
# latest_report_summary = Portal.get_latest_report_summary()
|
||||
# if attempts == 6:
|
||||
# self.fail("No new report summary was found after 120 seconds; failing.")
|
||||
#
|
||||
# self.assertEqual(latest_report_summary["hirsAppraisalResult"]["appraisalStatus"], 'PASS')
|
||||
#
|
||||
# self.assertTrue(Portal.report_contains_ima_record(
|
||||
# partial_filename, sha_hash, latest_report_summary['report']['id']))
|
||||
# sub_reports = latest_report_summary['report']['reports']
|
||||
# self.assertTrue(any(sr for sr in sub_reports if 'TPMReport' in sr['reportType']),
|
||||
# "report summary should contain a TPMReport as a sub-report")
|
||||
|
||||
@collectors(['IMA'], COLLECTOR_LIST)
|
||||
@unittest.skip("SELinux issues are preventing repo sync from working")
|
||||
def test_11_failing_ima_appraisal_broad_repo_baseline(self):
|
||||
"""Test that an appraisal not containing expected packages in a broad repo IMA baseline fails.
|
||||
|
||||
steps:
|
||||
- Create a Yum repository with a local file URL and sync it
|
||||
- Create a broad baseline using the Yum repository
|
||||
- Add the baseline to the required set for the default IMA policy
|
||||
- Run a HIRS report and ensure it fails
|
||||
- Ensure that at least one of the expected alerts has been generated
|
||||
"""
|
||||
logging.info("***************** Beginning of broad repo failing appraisal test *****************")
|
||||
# repo_name = "Test Yum Repository"
|
||||
# baseline_name = "Test Broad Baseline"
|
||||
# policy_name = "Test Broad Repo IMA Policy"
|
||||
# repo_url = 'file:///flamethrower/Systems_Tests/resources/repositories/small_yum_repo'
|
||||
#
|
||||
# Portal.configure_yum_repository(repo_name, repo_url)
|
||||
# Portal.create_broad_ima_baseline(baseline_name, repo_name)
|
||||
# Portal.create_policy(policy_name, "IMA")
|
||||
# Portal.add_baseline_to_required_sets(policy_name, baseline_name)
|
||||
# Portal.set_tpm_ima_policy(ima_policy=policy_name, tpm_policy=DEFAULT_TPM_POLICY)
|
||||
#
|
||||
# self.assertFalse(run_hirs_report(CLIENT))
|
||||
# alerts = Portal.get_alerts_from_latest_report()
|
||||
# self.assertTrue(alerts_contain(alerts['data'], {
|
||||
# 'source': 'IMA_APPRAISER',
|
||||
# 'type': 'MISSING_RECORD',
|
||||
# 'expected': '(/usr/lib64/glusterfs/3.7.6/xlator/features/quota.so, SHA-1 - 0xc9b5e8df6b50f2f58ea55fd41a962393d9eeec94)',
|
||||
# }))
|
||||
|
||||
@collectors(['IMA'], COLLECTOR_LIST)
|
||||
@unittest.skip("SELinux issues are preventing repo sync from working")
|
||||
@unittest.skipIf(is_ubuntu_client(CLIENT_OS), "Skipping this test due to client OS " + CLIENT_OS)
|
||||
def test_12_successful_ima_appraisal_broad_repo_baseline(self):
|
||||
"""Test that an appraisal containing expected packages in a broad repo IMA baseline passes.
|
||||
This test only works on CentOS 6 and 7.
|
||||
|
||||
steps:
|
||||
- Create a Yum repository with a local file URL and sync it
|
||||
- Create a broad baseline using the Yum repository
|
||||
- Add the baseline to the required set for the default IMA policy
|
||||
- Install RPMs in repository to client machine and read them with root to ensure their placement in the IMA log
|
||||
- Run a HIRS report and ensure it passes
|
||||
- Ensure that there are no new alerts
|
||||
"""
|
||||
logging.info("***************** Beginning of broad repo successful appraisal test *****************")
|
||||
# repo_name = "Test Yum Repository"
|
||||
# baseline_name = "Test Broad Baseline"
|
||||
# policy_name = "Test Broad Repo IMA Policy"
|
||||
# repo_url = 'file:///flamethrower/Systems_Tests/resources/repositories/two_package_yum_repo'
|
||||
#
|
||||
# Portal.configure_yum_repository(repo_name, repo_url)
|
||||
# Portal.create_broad_ima_baseline(baseline_name, repo_name)
|
||||
# Portal.create_policy(policy_name, "IMA")
|
||||
# Portal.add_baseline_to_required_sets(policy_name, baseline_name)
|
||||
# Portal.set_partial_paths_for_ima_policy(policy_name, True)
|
||||
# Portal.set_tpm_ima_policy(ima_policy=policy_name, tpm_policy=DEFAULT_TPM_POLICY)
|
||||
#
|
||||
# if CLIENT_OS in ["centos6", "centos7"]:
|
||||
# send_vagrant_command("sudo rpm -i --force /flamethrower/Systems_Tests/resources/repositories/two_package_yum_repo/SimpleTest1-1-1.noarch.rpm", CLIENT)
|
||||
# send_vagrant_command("sudo rpm -i --force /flamethrower/Systems_Tests/resources/repositories/two_package_yum_repo/SimpleTest2-1-1.noarch.rpm", CLIENT)
|
||||
# else:
|
||||
# logging.error("unsupported client os: %s", CLIENT_OS)
|
||||
#
|
||||
# send_vagrant_command("sudo find /opt/simpletest -type f -exec head {} \;", CLIENT)
|
||||
#
|
||||
# self.assertTrue(run_hirs_report(CLIENT))
|
||||
# self.assertEqual(Portal.get_alert_count_from_latest_report(), 0)
|
||||
|
||||
@collectors(['TPM'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_1_2(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_13_tpm_1_2_initial_provision(self):
|
||||
"""Test that running the TPM 1.2 hirs provisioner works"""
|
||||
logging.info("***************** Beginning of initial TPM 1.2 provisioner run *****************")
|
||||
|
||||
# Run the provisioner to ensure that it provisions successfully
|
||||
provisioner_out = run_hirs_provisioner_tpm_1_2(CLIENT)
|
||||
print("Initial TPM 1.2 provisioner run output: {0}".format(provisioner_out))
|
||||
|
||||
@collectors(['TPM'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_14_tpm_2_0_initial_provision(self):
|
||||
"""Test that running the TPM 2.0 hirs provisioner works"""
|
||||
logging.info("***************** Beginning of initial TPM 2.0 provisioner run *****************")
|
||||
|
||||
# Run the provisioner to ensure that it provisions successfully
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
print("Initial TPM 2.0 provisioner run output: {0}".format(provisioner_out))
|
||||
|
||||
@collectors(['TPM'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_15_device_info_report_stored_after_provisioning(self):
|
||||
"""Test that running the hirs provisioner results in storing a device info report for
|
||||
the device in the DB"""
|
||||
logging.info("***************** Beginning of device info report test *****************")
|
||||
|
||||
logging.info("Getting devices from ACA portal...")
|
||||
aca_portal_devices = AcaPortal.get_devices()
|
||||
self.assertEqual(aca_portal_devices['recordsTotal'], 1)
|
||||
|
||||
@collectors(['TPM'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_16_supply_chain_validation_summary_stored_after_second_provisioning(self):
|
||||
"""Test that running the hirs provisioner, a second time, results in storing a supply chain validation
|
||||
record in the database"""
|
||||
logging.info("***************** Beginning of supply chain validation summary test *****************")
|
||||
|
||||
logging.info("Uploading CA cert: " + CA_CERT_LOCATION)
|
||||
AcaPortal.upload_ca_cert(CA_CERT_LOCATION)
|
||||
AcaPortal.enable_supply_chain_validations()
|
||||
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
print("Second provisioner run output: {0}".format(provisioner_out))
|
||||
|
||||
supply_chain_validation_summaries = AcaPortal.get_supply_chain_validation_summaries()
|
||||
# verify this is one SCVS record indicating PASS
|
||||
self.assertEqual(supply_chain_validation_summaries['recordsTotal'], 2)
|
||||
self.assertEqual(supply_chain_validation_summaries['data'][0]['overallValidationResult'], "PASS")
|
||||
self.assertEqual(supply_chain_validation_summaries['data'][1]['overallValidationResult'], "PASS")
|
||||
|
||||
# verify device has been updated with supply chain appraisal result
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
@collectors(['TPM'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_17_ek_info_report(self):
|
||||
"""Test that running the hirs provisioner results in storing EK certs info report for
|
||||
the device in the DB"""
|
||||
logging.info("***************** Beginning of Endorsement Certs info report test *****************")
|
||||
|
||||
logging.info("Getting EK Certs from ACA portal...")
|
||||
cert_list = AcaPortal.get_ek_certs()
|
||||
self.assertEqual(cert_list['recordsTotal'], 1)
|
||||
self.assertEqual(cert_list['data'][0]['credentialType'], "TCPA Trusted Platform Module Endorsement")
|
||||
|
||||
@collectors(['TPM'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_18_pk_info_report(self):
|
||||
"""Test that running the hirs provisioner results in storing PK certs info report for
|
||||
the device in the DB"""
|
||||
logging.info("***************** Beginning Platform Certs info report test *****************")
|
||||
|
||||
logging.info("Getting PK Certs from ACA portal...")
|
||||
cert_list = AcaPortal.get_pk_certs()
|
||||
self.assertEqual(cert_list['recordsTotal'], 1)
|
||||
self.assertEqual(cert_list['data'][0]['credentialType'], "TCG Trusted Platform Endorsement")
|
||||
|
||||
@collectors(['TPM'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_19_trust_chain_info_report(self):
|
||||
"""Test that running the hirs provisioner results in storing trust chains info report for
|
||||
the device in the DB"""
|
||||
logging.info("***************** Beginning of Trust Chain info report test *****************")
|
||||
logging.info("Getting Trust Chains from ACA portal...")
|
||||
trust_chain_list = AcaPortal.get_trust_chains()
|
||||
self.assertEqual(trust_chain_list['recordsTotal'], 1)
|
||||
|
||||
@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_20_A1_base_delta(self):
|
||||
"""Test Delta Certificates A1 - Provisioning with Good Base Platform Cert (via Platform Cert on TPM Emulator)"""
|
||||
logging.info("***************** test_20_A1 - Beginning of delta certificate test *****************")
|
||||
logging.info("Provisioning with Good Base Platform Cert (via Platform Cert on TPM Emulator)")
|
||||
|
||||
logging.info("Check if ACA is online...")
|
||||
AcaPortal.check_is_online()
|
||||
|
||||
logging.info("Uploading CA Cert: " + CA_CERT_LOCATION)
|
||||
AcaPortal.upload_ca_cert(CA_CERT_LOCATION)
|
||||
AcaPortal.enable_supply_chain_validations()
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
|
||||
print("test_20_A1_base_delta run output: {0}".format(provisioner_out))
|
||||
|
||||
# Verify device supply chain appraisal result is PASS
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_20_A2_base_delta(self):
|
||||
"""Test Delta Certificates A2 - Attempt to upload Base cert with holder already having a Base Platform Cert associated with it"""
|
||||
logging.info("***************** test_20_A2 - Beginning of delta certificate test *****************")
|
||||
logging.info("Attempt to upload PBaseCertB, with PBaseCertA already loaded in the ACA.")
|
||||
|
||||
print("test_20_A2_base_delta. PBaseCertA has already been loaded. Attempting to upload second Platform Cert: %s" % (PBaseCertB_LOCATION))
|
||||
|
||||
# Confirm there is one Platform Base Cert already loaded
|
||||
cert_list = AcaPortal.get_pk_certs()
|
||||
self.assertEqual(cert_list['recordsTotal'], 1)
|
||||
print("Number of Platform Certs: %d" % (cert_list['recordsTotal']))
|
||||
self.assertEqual(cert_list['data'][0]['credentialType'], "TCG Trusted Platform Endorsement")
|
||||
self.assertEqual(cert_list['data'][0]['platformType'], "Base")
|
||||
|
||||
# Try uploading a second Platform Base Cert
|
||||
print("Attempting to upload a second Platform Base Cert...")
|
||||
AcaPortal.upload_pk_cert(PBaseCertB_LOCATION)
|
||||
|
||||
# Confirm Platform Base Cert has not been loaded
|
||||
cert_list = AcaPortal.get_pk_certs()
|
||||
self.assertEqual(cert_list['recordsTotal'], 1)
|
||||
print("Number of Platform Certs: %d" % (cert_list['recordsTotal']))
|
||||
self.assertEqual(cert_list['data'][0]['credentialType'], "TCG Trusted Platform Endorsement")
|
||||
self.assertEqual(cert_list['data'][0]['platformType'], "Base")
|
||||
|
||||
if (cert_list['recordsTotal'] == 1):
|
||||
print ("SUCCESS.\n")
|
||||
else:
|
||||
print ("FAILED.\n")
|
||||
|
||||
@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_20_A3_base_delta(self):
|
||||
"""Test Delta Certificates A3 - Provisioning with Good Base Platform Cert Base and 1 Delta Cert"""
|
||||
logging.info("***************** test_20_A3 - Beginning of delta certificate test *****************")
|
||||
logging.info("Provisioning with Good Base Platform Cert Base and 1 Delta Cert")
|
||||
|
||||
# Verify device supply chain appraisal result is PASS
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
# Upload the SIDeltaCertA1 and provision
|
||||
AcaPortal.upload_pk_cert(SIDeltaCertA1_LOCATION)
|
||||
AcaPortal.enable_supply_chain_validations()
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
print("test_20_A3_base_delta run output: {0}".format(provisioner_out))
|
||||
|
||||
supply_chain_validation_summaries = AcaPortal.get_supply_chain_validation_summaries()
|
||||
# Verify this is one SCVS record indicating PASS
|
||||
self.assertEqual(supply_chain_validation_summaries['recordsTotal'], 2)
|
||||
self.assertEqual(supply_chain_validation_summaries['data'][0]['overallValidationResult'], "PASS")
|
||||
self.assertEqual(supply_chain_validation_summaries['data'][1]['overallValidationResult'], "PASS")
|
||||
|
||||
# Verify device has been updated with supply chain appraisal result
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_20_A4_base_delta(self):
|
||||
"""Test Delta Certificates A4 - Provisioning with Good Base Platform Cert Base and 2 Delta Certs"""
|
||||
logging.info("***************** test_20_A4 - Beginning of delta certificate test *****************")
|
||||
logging.info("Provisioning with Good Base Platform Cert Base and 2 Delta Certs")
|
||||
|
||||
# Verify device supply chain appraisal result is PASS
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
# Upload the VARDeltaCertA1 and provision
|
||||
AcaPortal.upload_pk_cert(VARDeltaCertA1_LOCATION)
|
||||
AcaPortal.enable_supply_chain_validations()
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
|
||||
print("test_20_A4_base_delta run output: {0}".format(provisioner_out))
|
||||
supply_chain_validation_summaries = AcaPortal.get_supply_chain_validation_summaries()
|
||||
|
||||
# Verify this is one SCVS record indicating PASS
|
||||
self.assertEqual(supply_chain_validation_summaries['recordsTotal'], 3)
|
||||
self.assertEqual(supply_chain_validation_summaries['data'][0]['overallValidationResult'], "PASS")
|
||||
self.assertEqual(supply_chain_validation_summaries['data'][1]['overallValidationResult'], "PASS")
|
||||
self.assertEqual(supply_chain_validation_summaries['data'][2]['overallValidationResult'], "PASS")
|
||||
|
||||
# Verify device has been updated with supply chain appraisal result
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_20_A5_base_delta(self):
|
||||
"""Test Delta Certificates A5 - Provisioning with Good Base Platform Cert and 1 Bad Delta Cert"""
|
||||
logging.info("***************** test_20_A5 - Beginning of delta certificate test *****************")
|
||||
logging.info("Provisioning with Good Base Platform Cert and 1 Bad Delta Cert")
|
||||
|
||||
# TODO: Determine if we need this test
|
||||
|
||||
# # Verify device supply chain appraisal result is PASS
|
||||
# devices = AcaPortal.get_devices()
|
||||
# self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
#
|
||||
# # Upload the VARDelta cert and provision
|
||||
# AcaPortal.upload_pk_cert(SIDeltaCertA2_LOCATION)
|
||||
# AcaPortal.enable_supply_chain_validations()
|
||||
# provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
#
|
||||
# print("test_19_A4_base_delta SHOULD FAIL provisioning!!")
|
||||
# print("test_19_A4_base_delta run output: {0}".format(provisioner_out))
|
||||
#
|
||||
# # Provisioning should fail since the Delta contains a bad component.
|
||||
# self.assertIn("Provisioning failed", format(provisioner_out))
|
||||
|
||||
@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_20_A6_base_delta(self):
|
||||
"""Test Delta Certificates A6 - Provisioning with Good Base Platform, 2 Good Delta Certs and 1 Bad Delta Cert"""
|
||||
logging.info("***************** test_20_A6 - Beginning of delta certificate test *****************")
|
||||
logging.info("Provisioning with Good Base Platform, 2 Good Delta Certs and 1 Bad Delta Cert")
|
||||
|
||||
# Verify device supply chain appraisal result is PASS
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
# Upload the SIDeltaCertA2 and provision
|
||||
AcaPortal.upload_pk_cert(SIDeltaCertA2_LOCATION)
|
||||
AcaPortal.enable_supply_chain_validations()
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
|
||||
print("test_20_A6_base_delta SHOULD FAIL provisioning using: %s" % (SIDeltaCertA2_LOCATION))
|
||||
print("test_20_A6_base_delta run output: {0}".format(provisioner_out))
|
||||
|
||||
# Provisioning should fail since the Delta contains a bad component.
|
||||
self.assertIn("Provisioning failed", format(provisioner_out))
|
||||
|
||||
# Upload the SIDeltaCertA2_resolved and provision
|
||||
AcaPortal.upload_pk_cert(SIDeltaCertA2_resolved_LOCATION)
|
||||
AcaPortal.enable_supply_chain_validations()
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
|
||||
print("test_20_A6_base_delta SHOULD PASS provisioning using: %s" % (SIDeltaCertA2_resolved_LOCATION))
|
||||
print("test_20_A6_base_delta run output: {0}".format(provisioner_out))
|
||||
|
||||
# Verify device has been updated with supply chain appraisal result
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], devices['data'][0]['device']['supplyChainStatus'])
|
||||
|
||||
@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_20_A7_base_delta(self):
|
||||
"""Test Delta Certificates A7 - Provisioning with Good Base Platform, 2 Good Delta Certs and
|
||||
1 Bad Delta Cert with non present component"""
|
||||
logging.info("***************** test_20_A7 - Beginning of delta certificate test *****************")
|
||||
logging.info("Provisioning with Good Base Platform, 2 Good Delta Certs and 1 Bad Delta Cert with non present component")
|
||||
|
||||
# Upload the VARDeltaCertA2 and provision
|
||||
AcaPortal.upload_pk_cert(VARDeltaCertA2_LOCATION)
|
||||
AcaPortal.enable_supply_chain_validations()
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
|
||||
print("test_20_A7_base_delta SHOULD FAIL provisioning using: %s" % (VARDeltaCertA2_LOCATION))
|
||||
print("test_20_A7_base_delta run output: {0}".format(provisioner_out))
|
||||
|
||||
# Provisioning should fail since the Delta contains a component thats not in the Base
|
||||
self.assertIn("Provisioning failed", format(provisioner_out))
|
||||
|
||||
# Upload the VARDeltaCertA2_resolved and provision
|
||||
AcaPortal.upload_pk_cert(VARDeltaCertA2_resolved_LOCATION)
|
||||
AcaPortal.enable_supply_chain_validations()
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
|
||||
print("test_20_A7_base_delta SHOULD PASS provisioning using: %s" % (VARDeltaCertA2_resolved_LOCATION))
|
||||
print("test_20_A7_base_delta run output: {0}".format(provisioner_out))
|
||||
|
||||
# Verify device has been updated with supply chain appraisal result
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], devices['data'][0]['device']['supplyChainStatus'])
|
||||
|
||||
@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_20_A8_base_delta(self):
|
||||
"""Test Delta Certificates A8 - Provisioning with Good Base Platform, 2 Good Delta Certs with 1 Delta cert
|
||||
replacing component from previous, using the Delta as a base certificate"""
|
||||
logging.info("***************** test_20_A8 - Beginning of delta certificate test *****************")
|
||||
logging.info("Provisioning with Good Base Platform, 2 Good Delta Certs with 1 Delta cert replacing component from previous, using the Delta as a base certificate")
|
||||
|
||||
# Upload the SIDeltaCertA3 and provision
|
||||
AcaPortal.upload_pk_cert(SIDeltaCertA3_LOCATION)
|
||||
AcaPortal.enable_supply_chain_validations()
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
|
||||
print("test_20_A8_base_delta run output: {0}".format(provisioner_out))
|
||||
|
||||
# Verify device has been updated with supply chain appraisal result
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], devices['data'][0]['device']['supplyChainStatus'])
|
||||
|
||||
@collectors(['BASE_DELTA_BAD'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_20_B1_base_delta(self):
|
||||
"""Test Base/Delta Certificates B1 - Provisioning with Bad Platform Cert Base """
|
||||
logging.info("***************** test_20_B1 - Beginning of delta certificate test *****************")
|
||||
logging.info("Provisioning with Bad Platform Cert Base")
|
||||
|
||||
logging.info("Check if ACA is online...")
|
||||
AcaPortal.check_is_online()
|
||||
|
||||
logging.info("Uploading CA cert: " + CA_CERT_LOCATION)
|
||||
AcaPortal.upload_ca_cert(CA_CERT_LOCATION)
|
||||
AcaPortal.enable_supply_chain_validations()
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
|
||||
print("test_20_B1_base_delta SHOULD FAIL provisioning using: %s" % (PBaseCertB_LOCATION))
|
||||
print("test_20_B1_base_delta run output: {0}".format(provisioner_out))
|
||||
|
||||
# Provisioning should fail since the PC contains FAULTY components.
|
||||
self.assertIn("Provisioning failed", format(provisioner_out))
|
||||
|
||||
@collectors(['BASE_DELTA_BAD'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_20_B2_base_delta(self):
|
||||
"""Test Base/Delta Certificates B2 - Provisioning with Bad Platform Cert Base and 1 Good delta with 1 bad component unresolved"""
|
||||
logging.info("***************** test_20_B2 - Beginning of delta certificate test *****************")
|
||||
logging.info("Provisioning with Bad Platform Cert Base and 1 Good delta with 1 bad component unresolved")
|
||||
|
||||
# Verify device supply chain appraisal result is FAIL
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "FAIL")
|
||||
|
||||
# Upload the SIDeltaCertB1 and provision
|
||||
AcaPortal.upload_pk_cert(SIDeltaCertB1_LOCATION)
|
||||
AcaPortal.enable_supply_chain_validations()
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
|
||||
print("test_20_B2_base_delta SHOULD FAIL provisioning using: %s" % (SIDeltaCertB1_LOCATION))
|
||||
print("test_20_B2_base_delta run output: {0}".format(provisioner_out))
|
||||
|
||||
# Provisioning should fail since the delta contains FAULTY component.
|
||||
self.assertIn("Provisioning failed", format(provisioner_out))
|
||||
|
||||
@collectors(['BASE_DELTA_BAD'], COLLECTOR_LIST)
|
||||
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
|
||||
def test_20_B3_base_delta(self):
|
||||
"""Test Base/Delta Certificates B3 - Provisioning with Bad Platform Cert Base and 2 Good delta with all component resolved"""
|
||||
logging.info("***************** test_20_B3 - Beginning of delta certificate test *****************")
|
||||
logging.info("Provisioning with Bad Platform Cert Base and 2 Good delta with all component resolved")
|
||||
|
||||
# Verify device supply chain appraisal result is FAIL
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "FAIL")
|
||||
|
||||
# Upload the VARDeltaCertB1 and provision
|
||||
AcaPortal.upload_pk_cert(VARDeltaCertB1_LOCATION)
|
||||
AcaPortal.enable_supply_chain_validations()
|
||||
provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
|
||||
|
||||
print("test_20_B3_base_delta run output: {0}".format(provisioner_out))
|
||||
|
||||
# Verify device has been updated with supply chain appraisal of PASS
|
||||
devices = AcaPortal.get_devices()
|
||||
self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
if __name__ == '__main__':
|
||||
suite = unittest.TestLoader().loadTestsFromTestCase(SystemTest)
|
||||
ret = not unittest.TextTestRunner(verbosity=2).run(suite).wasSuccessful()
|
||||
sys.exit(ret)
|
@ -1,633 +0,0 @@
|
||||
# System Test Driver to help with debugging.
|
||||
|
||||
from __future__ import print_function
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
import urllib3
|
||||
|
||||
from system_test_core import DEFAULT_IMA_POLICY, DEFAULT_TPM_POLICY, \
|
||||
HIRSPortal, AttestationCAPortal, collectors, \
|
||||
send_command, send_command_sha1sum, run_hirs_report, run_hirs_provisioner_tpm_1_2, \
|
||||
run_hirs_provisioner_tpm_2_0, parse_xml_with_stripped_namespaces, get_current_timestamp, \
|
||||
get_all_nodes_recursively, touch_random_file_and_remove, get_random_pcr_hex_value, \
|
||||
is_ubuntu_client, is_tpm_2_0, is_tpm_1_2 \
|
||||
|
||||
NUMBER_OF_PCRS = 24
|
||||
|
||||
suffix = os.environ.get('RANDOM_SYS_TEST_ID')
|
||||
if suffix != None:
|
||||
print("Configuring with suffix: %s" % suffix)
|
||||
suffix = "-" + suffix
|
||||
else:
|
||||
suffix = ""
|
||||
|
||||
# Change to point to your HIRS directory
|
||||
#HOME_DIR = "/HIRS/"
|
||||
HOME_DIR = "/workspace/git/python2to3-dev-3/"
|
||||
HIRS_ACA_PORTAL_IP="172.17.0.2"
|
||||
TPM_VERSION="2.0"
|
||||
#TPM_VERSION="1.2"
|
||||
# Change accordingly
|
||||
#COLLECTOR_LIST = None
|
||||
#COLLECTOR_LIST = ["IMA"]
|
||||
COLLECTOR_LIST = ["TPM"]
|
||||
#COLLECTOR_LIST = ["IMA", "TPM"]
|
||||
#COLLECTOR_LIST = ["BASE_DELTA_GOOD"]
|
||||
#COLLECTOR_LIST = ["BASE_DELTA_BAD"]
|
||||
|
||||
FORMAT = "%(asctime)-15s %(message)s"
|
||||
provisioner_out = None
|
||||
|
||||
HIRS_ACA_PROVISIONER_IP="172.19.0.3"
|
||||
HIRS_ACA_PROVISIONER_TPM2_IP="172.19.0.4"
|
||||
TPM_ENABLED=True
|
||||
IMA_ENABLED=False
|
||||
|
||||
HIRS_ACA_PORTAL_PORT="8443"
|
||||
HIRS_BROKER_PORT="61616"
|
||||
HIRS_ACA_PORTAL_CONTAINER_PORT="80"
|
||||
HIRS_ACA_HOSTNAME="hirsaca"
|
||||
HIRS_SUBNET="172.19.0.0/16"
|
||||
CLIENT_OS="centos7"
|
||||
CLIENT_HOSTNAME="hirs-client-"+ CLIENT_OS + "-tpm2"
|
||||
CLIENT=CLIENT_HOSTNAME
|
||||
SERVER_OS="$CLIENT_OS"
|
||||
SERVER_HOSTNAME="hirs-appraiser-$SERVER_OS"
|
||||
|
||||
HIRS_ATTESTATION_CA_PORTAL_URL = "https://" + \
|
||||
HIRS_ACA_PORTAL_IP + ":" + \
|
||||
HIRS_ACA_PORTAL_PORT + \
|
||||
"/HIRS_AttestationCAPortal/"
|
||||
|
||||
CA_CERT_LOCATION = HOME_DIR + ".ci/setup/certs/ca.crt"
|
||||
EK_CA_CERT_LOCATION = HOME_DIR + ".ci/setup/certs/ek_cert.der"
|
||||
PBaseCertA_LOCATION = HOME_DIR + "PBaseCertA.der"
|
||||
PBaseCertB_LOCATION = HOME_DIR + "PBaseCertB.der"
|
||||
SIDeltaCertA1_LOCATION = HOME_DIR + "SIDeltaCertA1.der"
|
||||
SIDeltaCertA2_resolved_LOCATION = HOME_DIR + "SIDeltaCertA2_resolved.der"
|
||||
SIDeltaCertA2_LOCATION = HOME_DIR + "SIDeltaCertA2.der"
|
||||
SIDeltaCertA3_LOCATION = HOME_DIR + "SIDeltaCertA3.der"
|
||||
VARDeltaCertA1_LOCATION = HOME_DIR + "VARDeltaCertA1.der"
|
||||
VARDeltaCertA2_LOCATION = HOME_DIR + "VARDeltaCertA2.der"
|
||||
VARDeltaCertA2_resolved_LOCATION = HOME_DIR + "VARDeltaCertA2_resolved.der"
|
||||
SIDeltaCertB1_LOCATION = HOME_DIR + "SIDeltaCertB1.der"
|
||||
VARDeltaCertB1_LOCATION = HOME_DIR + "VARDeltaCertB1.der"
|
||||
|
||||
TEST_LOG_FILE= HOME_DIR + ".ci/system-tests/test_logs/system_test_" + CLIENT_OS + ".log"
|
||||
LOG_LEVEL="logging.INFO"
|
||||
|
||||
print("Start of Log file: " + TEST_LOG_FILE)
|
||||
logging.basicConfig(filename=TEST_LOG_FILE,level=eval(LOG_LEVEL), format=FORMAT)
|
||||
logging.info("*****************beginning of system_test.py*****************")
|
||||
logging.info("The Collector list is: " + ' '.join(COLLECTOR_LIST))
|
||||
logging.info("The ACA Portal is: " + HIRS_ATTESTATION_CA_PORTAL_URL)
|
||||
|
||||
#Portal = HIRSPortal(HIRS_SERVER_URL)
|
||||
AcaPortal = AttestationCAPortal(HIRS_ATTESTATION_CA_PORTAL_URL)
|
||||
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
class SystemTest(unittest.TestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpClass(self):
|
||||
"""Set the class up"""
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(self):
|
||||
"""Tears down the class"""
|
||||
|
||||
def setUp(self):
|
||||
"""Set the systems tests state up for testing"""
|
||||
AcaPortal.disable_supply_chain_validations()
|
||||
|
||||
def tearDown(self):
|
||||
"""Tears down the state for testing"""
|
||||
|
||||
def test_01_attestation_ca_portal_online(self):
    """Test that the Attestation CA Portal is online and accessible by making a GET request.
    If not online, an exception will be raised since the response code is non-200"""
    logging.info("***************** Beginning of attestation ca portal online test *****************")
    # check_is_online raises if the portal does not answer with HTTP 200.
    AcaPortal.check_is_online()

@collectors(['IMA', 'TPM'], COLLECTOR_LIST)
def test_02_empty_baselines(self):
    """Test that appraisal succeeds with empty IMA and TPM baselines"""
    logging.info("***************** Beginning of empty baseline test *****************")
    # NOTE(review): no assertions yet -- test body not implemented in this revision.

@collectors(['IMA'], COLLECTOR_LIST)
def test_03_small_ima_appraisal(self):
    """Test that appraisal works with a small hard-coded IMA baseline"""
    logging.info("***************** Beginning of small IMA appraisal test *****************")
    # NOTE(review): no assertions yet -- test body not implemented in this revision.

@collectors(['IMA'], COLLECTOR_LIST)
def test_04_large_ima_appraisal(self):
    """Test that appraisal works with a full-size IMA baseline"""
    logging.info("***************** Beginning of large IMA appraisal test *****************")
    # NOTE(review): no assertions yet -- test body not implemented in this revision.

@collectors(['IMA'], COLLECTOR_LIST)
def test_05_small_ima_appraisal_required_set_missing(self):
    """Test that appraisal results in an appropriate alert generation when a required set file is missing

    steps:
      - upload a small hard-coded required set (two records)
      - add a fictitious file to the baseline
      - make a policy that points to that baseline as its required set
      - set the default device group to point to that policy
      - run a report from the client machine using vagrant ssh
      - make sure it failed and that one appropriate alert was thrown
    """
    logging.info("***************** Beginning of small IMA appraisal test with required set missing *****************")
    # NOTE(review): no assertions yet -- test body not implemented in this revision.

@collectors(['TPM', 'IMA'], COLLECTOR_LIST)
def test_06_tpm_white_list_appraisal(self):
    """Test that appraisal works with a TPM white list baseline

    steps:
      - run hirs report to generate an XML report for baseline creation
      - download the latest report in XML format
      - convert the TPM part of the report into a json baseline
      - make a policy that points to that json TPM white list baseline
      - set the default device group to point to that policy
      - run a report from the client machine
    """
    logging.info("***************** Beginning of TPM white list appraisal test *****************")
    # NOTE(review): no assertions yet -- test body not implemented in this revision.

@collectors(['IMA'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_07_ima_blacklist_appraisal(self):
    """Test that appraisal works with a small IMA blacklist baseline

    steps:
      - upload a policy with a small hard-coded blacklist baseline
      - set the default device group to point to that policy
      - run a report from the client machine and ensure the appraisal passes
      - touch a file on the client that is contained in the blacklist
      - run another report from the client machine and ensure the appraisal fails
    """
    logging.info("***************** Beginning of blacklist IMA appraisal test *****************")
    # NOTE(review): no assertions yet -- test body not implemented in this revision.

@collectors(['IMA'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_08_delta_reports_required_set(self):
    """Test that appraisal works with delta reports and required sets.

    steps:
      - Run hirs report with an empty required set and delta reports enabled
      - Check first report for success and to make sure the test files are not there
      - Add the two test files (foo-file and foo-bar-file) to the required
        set with a hashes that indicates the files are empty
      - create foo-file and read it as root so it is measured by IMA
      - Run second hirs report
      - Check for failed appraisal (foo-bar-file hasn't been created yet)
      - Check that the report includes foo-file, but not foo-bar-file
      - Create foo-bar-file and read it as root
      - Run third hirs report
      - Check for failed appraisal (foo-file was in the previous report,
        so it won't be included in this one.
      - Check that foo-bar-file is in this report, but not foo-file
    """
    logging.info("***************** Beginning of Delta Reports required set appraisal test *****************")
    # NOTE(review): no assertions yet -- test body not implemented in this revision.

@collectors(['IMA'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_09_delta_reports_whitelist(self):
    """Test that appraisal works with delta reports. Each report should be
    appraised individually. Checks that a failed appraisal can be followed
    by a successful appraisal if there are no errors in the second delta
    report.

    steps:
      - Run hirs report with an empty required set and delta reports enabled
      - Check first report for success and to make sure the test files are not there
      - Add a test file (foo-file) to the whitelist with a hash that
        indicates the file is empty
      - Create foo-file with contents and read it as root so it is
        measured by IMA
      - Run second hirs report
      - Check for failed appraisal (foo-file should be a whitelist
        mismatch because the file isn't empty)
      - Check that the report includes foo-file
      - Run third hirs report
      - Check for successful appraisal (the mismatch was in the previous
        report so it won't be included in this one.
      - Check that foo-file is not in this report
    """
    logging.info("***************** Beginning of Delta Reports whitelist appraisal test *****************")
    # NOTE(review): no assertions yet -- test body not implemented in this revision.

@collectors(['IMA', 'TPM'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_10_on_demand(self):
    """Test that on-demand (server-initiated) appraisal works.

    steps:
      - push a simple ima baseline
      - set the policy
      - touch a random file, take the hash, then remove it
      - kick off an on-demand report on the server for the default device group
      - sleep to let the appraisal finish
      - pull the generated report
      - check that it passed appraisal
      - check that it has the random filename and hash
      - check that it contains a TPM Report
    """
    logging.info("***************** Beginning of on-demand test *****************")
    # NOTE(review): no assertions yet -- test body not implemented in this revision.
|
||||
|
||||
@collectors(['IMA'], COLLECTOR_LIST)
@unittest.skip("SELinux issues are preventing repo sync from working")
def test_11_failing_ima_appraisal_broad_repo_baseline(self):
    """Test that an appraisal not containing expected packages in a broad repo IMA baseline fails.

    steps:
      - Create a Yum repository with a local file URL and sync it
      - Create a broad baseline using the Yum repository
      - Add the baseline to the required set for the default IMA policy
      - Run a HIRS report and ensure it fails
      - Ensure that at least one of the expected alerts has been generated
    """
    logging.info("***************** Beginning of broad repo failing appraisal test *****************")
    # NOTE(review): no assertions yet -- test body not implemented in this revision.

@collectors(['IMA'], COLLECTOR_LIST)
@unittest.skip("SELinux issues are preventing repo sync from working")
@unittest.skipIf(is_ubuntu_client(CLIENT_OS), "Skipping this test due to client OS " + CLIENT_OS)
def test_12_successful_ima_appraisal_broad_repo_baseline(self):
    """Test that an appraisal containing expected packages in a broad repo IMA baseline passes.
    This test only works on CentOS 6 and 7.

    steps:
      - Create a Yum repository with a local file URL and sync it
      - Create a broad baseline using the Yum repository
      - Add the baseline to the required set for the default IMA policy
      - Install RPMs in repository to client machine and read them with root to ensure their placement in the IMA log
      - Run a HIRS report and ensure it passes
      - Ensure that there are no new alerts
    """
    logging.info("***************** Beginning of broad repo successful appraisal test *****************")
    # NOTE(review): no assertions yet -- test body not implemented in this revision.
|
||||
|
||||
@collectors(['TPM'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_1_2(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_13_tpm_1_2_initial_provision(self):
    """Test that running the TPM 1.2 hirs provisioner works"""
    logging.info("***************** Beginning of initial TPM 1.2 provisioner run *****************")

    # Run the provisioner to ensure that it provisions successfully
    provisioner_out = run_hirs_provisioner_tpm_1_2(CLIENT)
    print("Initial TPM 1.2 provisioner run output: {0}".format(provisioner_out))

@collectors(['TPM'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_14_tpm_2_0_initial_provision(self):
    """Test that running the TPM 2.0 hirs provisioner works"""
    logging.info("***************** Beginning of initial TPM 2.0 provisioner run *****************")

    # Run the provisioner to ensure that it provisions successfully
    # NOTE(review): this helper is named run_hirs_provisioner_tpm2 while later
    # tests call run_hirs_provisioner_tpm_2_0 -- confirm both helpers exist.
    provisioner_out = run_hirs_provisioner_tpm2(CLIENT)
    print("Initial provisioner run output: {0}".format(provisioner_out))
|
||||
|
||||
@collectors(['TPM'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_15_device_info_report_stored_after_provisioning(self):
    """Test that running the hirs provisioner results in storing a device info report for
    the device in the DB"""
    logging.info("***************** Beginning of device info report test *****************")

    logging.info("Getting devices from ACA portal...")
    aca_portal_devices = AcaPortal.get_devices()
    # Exactly one device should be registered by the earlier provisioning run.
    self.assertEqual(aca_portal_devices['recordsTotal'], 1)

@collectors(['TPM'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_16_supply_chain_validation_summary_stored_after_second_provisioning(self):
    """Test that running the hirs provisioner, a second time, results in storing a supply chain validation
    record in the database"""
    logging.info("***************** Beginning of supply chain validation summary test *****************")

    logging.info("Uploading CA cert: " + CA_CERT_LOCATION)
    AcaPortal.upload_ca_cert(CA_CERT_LOCATION)
    AcaPortal.enable_supply_chain_validations()

    provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
    print("Second provisioner run output: {0}".format(provisioner_out))

    supply_chain_validation_summaries = AcaPortal.get_supply_chain_validation_summaries()
    # This is the second provisioning run, so there should be TWO SCVS records,
    # each indicating PASS. (The previous comment claimed "one record", which
    # contradicted the assertion below.)
    self.assertEqual(supply_chain_validation_summaries['recordsTotal'], 2)
    self.assertEqual(supply_chain_validation_summaries['data'][0]['overallValidationResult'], "PASS")
    self.assertEqual(supply_chain_validation_summaries['data'][1]['overallValidationResult'], "PASS")

    # verify device has been updated with supply chain appraisal result
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
@collectors(['TPM'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_17_ek_info_report(self):
    """Test that running the hirs provisioner results in storing EK certs info report for
    the device in the DB"""
    logging.info("***************** Beginning of Endorsement Certs info report test *****************")

    logging.info("Getting EK Certs from ACA portal...")
    cert_list = AcaPortal.get_ek_certs()
    # Exactly one EK cert, carrying the TCPA endorsement credential type.
    self.assertEqual(cert_list['recordsTotal'], 1)
    self.assertEqual(cert_list['data'][0]['credentialType'], "TCPA Trusted Platform Module Endorsement")

@collectors(['TPM'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_18_pk_info_report(self):
    """Test that running the hirs provisioner results in storing PK certs info report for
    the device in the DB"""
    logging.info("***************** Beginning Platform Certs info report test *****************")

    logging.info("Getting PK Certs from ACA portal...")
    cert_list = AcaPortal.get_pk_certs()
    # Exactly one platform cert, carrying the TCG endorsement credential type.
    self.assertEqual(cert_list['recordsTotal'], 1)
    self.assertEqual(cert_list['data'][0]['credentialType'], "TCG Trusted Platform Endorsement")

@collectors(['TPM'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_19_trust_chain_info_report(self):
    """Test that running the hirs provisioner results in storing trust chains info report for
    the device in the DB"""
    logging.info("***************** Beginning of Trust Chain info report test *****************")

    logging.info("Getting Trust Chains from ACA portal...")
    trust_chain_list = AcaPortal.get_trust_chains()
    # A single trust chain is expected for the uploaded CA cert.
    self.assertEqual(trust_chain_list['recordsTotal'], 1)
|
||||
|
||||
@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_20_A1_base_delta(self):
    """Test Delta Certificates A1 - Provisioning with Good Base Platform Cert (via Platform Cert on TPM Emulator)"""
    logging.info("***************** test_20_A1 - Beginning of delta certificate test *****************")
    logging.info("Provisioning with Good Base Platform Cert (via Platform Cert on TPM Emulator)")

    logging.info("Check if ACA is online...")
    AcaPortal.check_is_online()

    logging.info("Uploading CA Cert: " + CA_CERT_LOCATION)
    AcaPortal.upload_ca_cert(CA_CERT_LOCATION)
    AcaPortal.enable_supply_chain_validations()
    provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)

    print("test_20_A1_base_delta run output: {0}".format(provisioner_out))

    # Verify device supply chain appraisal result is PASS
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")

@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_20_A2_base_delta(self):
    """Test Delta Certificates A2 - Attempt to upload Base cert with holder already having a Base Platform Cert associated with it"""
    logging.info("***************** test_20_A2 - Beginning of delta certificate test *****************")
    logging.info("Attempt to upload PBaseCertB, with PBaseCertA already loaded in the ACA.")

    print("test_20_A2_base_delta. PBaseCertA has already been loaded. Attempting to upload second Platform Cert: %s" % (PBaseCertB_LOCATION))

    # Confirm there is one Platform Base Cert already loaded
    cert_list = AcaPortal.get_pk_certs()
    self.assertEqual(cert_list['recordsTotal'], 1)
    print("Number of Platform Certs: %d" % (cert_list['recordsTotal']))
    self.assertEqual(cert_list['data'][0]['credentialType'], "TCG Trusted Platform Endorsement")
    self.assertEqual(cert_list['data'][0]['platformType'], "Base")

    # Try uploading a second Platform Base Cert
    print("Attempting to upload a second Platform Base Cert...")
    AcaPortal.upload_pk_cert(PBaseCertB_LOCATION)

    # Confirm Platform Base Cert has not been loaded
    cert_list = AcaPortal.get_pk_certs()
    self.assertEqual(cert_list['recordsTotal'], 1)
    print("Number of Platform Certs: %d" % (cert_list['recordsTotal']))
    self.assertEqual(cert_list['data'][0]['credentialType'], "TCG Trusted Platform Endorsement")
    self.assertEqual(cert_list['data'][0]['platformType'], "Base")

    # The assertEqual above already guarantees recordsTotal == 1, so the old
    # if/else with a "FAILED.\n" branch was unreachable; report success directly.
    print("SUCCESS.\n")
|
||||
|
||||
@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_20_A3_base_delta(self):
    """Test Delta Certificates A3 - Provisioning with Good Base Platform Cert Base and 1 Delta Cert"""
    logging.info("***************** test_20_A3 - Beginning of delta certificate test *****************")
    logging.info("Provisioning with Good Base Platform Cert Base and 1 Delta Cert")

    # Verify device supply chain appraisal result is PASS
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")

    # Upload the SIDeltaCertA1 and provision
    AcaPortal.upload_pk_cert(SIDeltaCertA1_LOCATION)
    AcaPortal.enable_supply_chain_validations()
    provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)
    print("test_20_A3_base_delta run output: {0}".format(provisioner_out))

    supply_chain_validation_summaries = AcaPortal.get_supply_chain_validation_summaries()
    # Two SCVS records are expected at this point, both indicating PASS.
    # (The previous comment said "one record", contradicting the assertion below.)
    self.assertEqual(supply_chain_validation_summaries['recordsTotal'], 2)
    self.assertEqual(supply_chain_validation_summaries['data'][0]['overallValidationResult'], "PASS")
    self.assertEqual(supply_chain_validation_summaries['data'][1]['overallValidationResult'], "PASS")

    # Verify device has been updated with supply chain appraisal result
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")

@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_20_A4_base_delta(self):
    """Test Delta Certificates A4 - Provisioning with Good Base Platform Cert Base and 2 Delta Certs"""
    logging.info("***************** test_20_A4 - Beginning of delta certificate test *****************")
    logging.info("Provisioning with Good Base Platform Cert Base and 2 Delta Certs")

    # Verify device supply chain appraisal result is PASS
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")

    # Upload the VARDeltaCertA1 and provision
    AcaPortal.upload_pk_cert(VARDeltaCertA1_LOCATION)
    AcaPortal.enable_supply_chain_validations()
    provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)

    print("test_20_A4_base_delta run output: {0}".format(provisioner_out))
    supply_chain_validation_summaries = AcaPortal.get_supply_chain_validation_summaries()

    # Three SCVS records are expected at this point, all indicating PASS.
    # (The previous comment said "one record", contradicting the assertion below.)
    self.assertEqual(supply_chain_validation_summaries['recordsTotal'], 3)
    self.assertEqual(supply_chain_validation_summaries['data'][0]['overallValidationResult'], "PASS")
    self.assertEqual(supply_chain_validation_summaries['data'][1]['overallValidationResult'], "PASS")
    self.assertEqual(supply_chain_validation_summaries['data'][2]['overallValidationResult'], "PASS")

    # Verify device has been updated with supply chain appraisal result
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_20_A5_base_delta(self):
    """Test Delta Certificates A5 - Provisioning with Good Base Platform Cert and 1 Bad Delta Cert"""
    logging.info("***************** test_20_A5 - Beginning of delta certificate test *****************")
    logging.info("Provisioning with Good Base Platform Cert and 1 Bad Delta Cert")

    # TODO: Determine if we need this test

@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_20_A6_base_delta(self):
    """Test Delta Certificates A6 - Provisioning with Good Base Platform, 2 Good Delta Certs and 1 Bad Delta Cert"""
    logging.info("***************** test_20_A6 - Beginning of delta certificate test *****************")
    logging.info("Provisioning with Good Base Platform, 2 Good Delta Certs and 1 Bad Delta Cert")

    # Verify device supply chain appraisal result is PASS
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")

    # Upload the SIDeltaCertA2 and provision
    AcaPortal.upload_pk_cert(SIDeltaCertA2_LOCATION)
    AcaPortal.enable_supply_chain_validations()
    provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)

    print("test_20_A6_base_delta SHOULD FAIL provisioning using: %s" % (SIDeltaCertA2_LOCATION))
    print("test_20_A6_base_delta run output: {0}".format(provisioner_out))

    # Provisioning should fail since the Delta contains a bad component.
    self.assertIn("Provisioning failed", format(provisioner_out))

    # Upload the SIDeltaCertA2_resolved and provision
    AcaPortal.upload_pk_cert(SIDeltaCertA2_resolved_LOCATION)
    AcaPortal.enable_supply_chain_validations()
    provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)

    print("test_20_A6_base_delta SHOULD PASS provisioning using: %s" % (SIDeltaCertA2_resolved_LOCATION))
    print("test_20_A6_base_delta run output: {0}".format(provisioner_out))

    # Verify device has been updated with supply chain appraisal result
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_20_A7_base_delta(self):
    """Test Delta Certificates A7 - Provisioning with Good Base Platform, 2 Good Delta Certs and
    1 Bad Delta Cert with non present component"""
    logging.info("***************** test_20_A7 - Beginning of delta certificate test *****************")
    logging.info("Provisioning with Good Base Platform, 2 Good Delta Certs and 1 Bad Delta Cert with non present component")

    # Upload the VARDeltaCertA2 and provision
    AcaPortal.upload_pk_cert(VARDeltaCertA2_LOCATION)
    AcaPortal.enable_supply_chain_validations()
    provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)

    print("test_20_A7_base_delta SHOULD FAIL provisioning using: %s" % (VARDeltaCertA2_LOCATION))
    print("test_20_A7_base_delta run output: {0}".format(provisioner_out))

    # Provisioning should fail since the Delta contains a component thats not in the Base
    self.assertIn("Provisioning failed", format(provisioner_out))

    # Upload the VARDeltaCertA2_resolved and provision
    AcaPortal.upload_pk_cert(VARDeltaCertA2_resolved_LOCATION)
    AcaPortal.enable_supply_chain_validations()
    provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)

    print("test_20_A7_base_delta SHOULD PASS provisioning using: %s" % (VARDeltaCertA2_resolved_LOCATION))
    print("test_20_A7_base_delta run output: {0}".format(provisioner_out))

    # Verify device has been updated with supply chain appraisal result
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")

@collectors(['BASE_DELTA_GOOD'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_20_A8_base_delta(self):
    """Test Delta Certificates A8 - Provisioning with Good Base Platform, 2 Good Delta Certs with 1 Delta cert
    replacing component from previous, using the Delta as a base certificate"""
    logging.info("***************** test_20_A8 - Beginning of delta certificate test *****************")
    logging.info("Provisioning with Good Base Platform, 2 Good Delta Certs with 1 Delta cert replacing component from previous, using the Delta as a base certificate")

    # Upload the SIDeltaCertA3 and provision
    AcaPortal.upload_pk_cert(SIDeltaCertA3_LOCATION)
    AcaPortal.enable_supply_chain_validations()
    provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)

    print("test_20_A8_base_delta run output: {0}".format(provisioner_out))

    # Verify device has been updated with supply chain appraisal result
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
@collectors(['BASE_DELTA_BAD'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_20_B1_base_delta(self):
    """Test Base/Delta Certificates B1 - Provisioning with Bad Platform Cert Base """
    logging.info("***************** test_20_B1 - Beginning of delta certificate test *****************")
    logging.info("Provisioning with Bad Platform Cert Base")

    logging.info("Check if ACA is online...")
    AcaPortal.check_is_online()

    logging.info("Uploading CA cert: " + CA_CERT_LOCATION)
    AcaPortal.upload_ca_cert(CA_CERT_LOCATION)
    AcaPortal.enable_supply_chain_validations()
    provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)

    print("test_20_B1_base_delta SHOULD FAIL provisioning using: %s" % (PBaseCertB_LOCATION))
    print("test_20_B1_base_delta run output: {0}".format(provisioner_out))

    # Provisioning should fail since the PC contains FAULTY components.
    self.assertIn("Provisioning failed", format(provisioner_out))

@collectors(['BASE_DELTA_BAD'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_20_B2_base_delta(self):
    """Test Base/Delta Certificates B2 - Provisioning with Bad Platform Cert Base and 1 Good delta with 1 bad component unresolved"""
    logging.info("***************** test_20_B2 - Beginning of delta certificate test *****************")
    logging.info("Provisioning with Bad Platform Cert Base and 1 Good delta with 1 bad component unresolved")

    # Verify device supply chain appraisal result is FAIL
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "FAIL")

    # Upload the SIDeltaCertB1 and provision
    AcaPortal.upload_pk_cert(SIDeltaCertB1_LOCATION)
    AcaPortal.enable_supply_chain_validations()
    provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)

    print("test_20_B2_base_delta SHOULD FAIL provisioning using: %s" % (SIDeltaCertB1_LOCATION))
    print("test_20_B2_base_delta run output: {0}".format(provisioner_out))

    # Provisioning should fail since the delta contains FAULTY component.
    self.assertIn("Provisioning failed", format(provisioner_out))

@collectors(['BASE_DELTA_BAD'], COLLECTOR_LIST)
@unittest.skipIf(not is_tpm_2_0(TPM_VERSION), "Skipping this test due to TPM Version " + TPM_VERSION)
def test_20_B3_base_delta(self):
    """Test Base/Delta Certificates B3 - Provisioning with Bad Platform Cert Base and 2 Good delta with all component resolved"""
    logging.info("***************** test_20_B3 - Beginning of delta certificate test *****************")
    logging.info("Provisioning with Bad Platform Cert Base and 2 Good delta with all component resolved")

    # Verify device supply chain appraisal result is FAIL
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "FAIL")

    # Upload the VARDeltaCertB1 and provision
    AcaPortal.upload_pk_cert(VARDeltaCertB1_LOCATION)
    AcaPortal.enable_supply_chain_validations()
    provisioner_out = run_hirs_provisioner_tpm_2_0(CLIENT)

    print("test_20_B3_base_delta run output: {0}".format(provisioner_out))

    # Verify device has been updated with supply chain appraisal of PASS
    devices = AcaPortal.get_devices()
    self.assertEqual(devices['data'][0]['device']['supplyChainStatus'], "PASS")
|
||||
|
||||
if __name__ == '__main__':
    # Collect every test in SystemTest, run with verbose output, and exit
    # with status 0 on success / 1 on any failure (same code as the original
    # "not wasSuccessful()" expression).
    suite = unittest.TestLoader().loadTestsFromTestCase(SystemTest)
    result = unittest.TextTestRunner(verbosity=2).run(suite)
    sys.exit(not result.wasSuccessful())
|
@ -1,583 +0,0 @@
|
||||
# Defines core methods shared amongst system test scripts.
|
||||
|
||||
from future import standard_library
|
||||
standard_library.install_aliases()
|
||||
from builtins import str
|
||||
from builtins import filter
|
||||
from builtins import object
|
||||
from io import StringIO
|
||||
import binascii
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import pprint
|
||||
import random
|
||||
import requests
|
||||
import shlex
|
||||
import subprocess
|
||||
import time
|
||||
import unittest
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
# Names of the portal objects the system tests create/modify.
DEFAULT_GROUP_NAME = "Default Group"
DEFAULT_TPM_POLICY = "Test TPM Policy"
DEFAULT_IMA_POLICY = "Test IMA Policy"
# Module-level cache for the most recently fetched XML report
# (populated by HIRSPortal.get_latest_report).
CACHED_XML_REPORT = None

# Substring expected in client output when an appraisal succeeds.
APPRAISAL_SUCCESS_MESSAGE = "Appraisal passed"
|
||||
|
||||
class HIRSPortal(object):
|
||||
def __init__(self, hirs_server_url):
    # Base URL of the HIRS portal all requests are issued against.
    self.server_url = hirs_server_url

def request(self, method, path, params={}, data={}, files={}, expected_status_codes=[200], operation=None, verify=False):
    """Issue an HTTP request against the portal by delegating to the
    module-level web_request helper.

    NOTE(review): the mutable default arguments ({} / [200]) are kept to
    preserve the public signature; web_request must not mutate them.
    """
    return web_request(self.server_url, method, path, params, data, files, expected_status_codes, operation, verify)
|
||||
|
||||
def set_default_policies(self, tpm_policy="No Policy",
                         ima_policy="No Policy"):
    """set the given policies to be the policies for the default group."""
    payload = {"description": "default group modified for systems tests",
               "name": DEFAULT_GROUP_NAME}
    # TODO this will report failure if the group already exists. Not sure how to avoid this
    self.request("post", "portal/group/create", data=payload)
    self.set_tpm_ima_policy(DEFAULT_GROUP_NAME, tpm_policy, ima_policy)

def set_tpm_ima_policy(self, group_name=DEFAULT_GROUP_NAME, tpm_policy=None, ima_policy=None):
    """set the TPM and IMA policy for the group

    The portal endpoint applies one policy type per call, selected by the
    "optionRadio" field, so issue one update for the IMA policy and one for
    the TPM policy. (The original duplicated the payload construction; this
    folds both calls into a loop with identical request payloads.)
    """
    for option_radio in ("existingImaPolicy", "existingTpmPolicy"):
        payload = {"name": group_name,
                   "ima": ima_policy,
                   "tpm": tpm_policy,
                   "optionRadio": option_radio,
                   "policyName": ""}
        self.request("post", "portal/group/update/policies", data=payload)
|
||||
|
||||
def set_group_appraisal_wait_setting(self, group_name=DEFAULT_GROUP_NAME,
                                     is_client_waiting='checked'):
    """set the specified group's client wait for appraisal setting to the specified value."""
    form_data = {"groupName": group_name, "enabled": is_client_waiting}
    self.request("post", "portal/group/editWaitForAppraisalCompletion", data=form_data)
|
||||
|
||||
def get_latest_report(self):
    """Retrieves the latest report that was created for the given client.

    The retrieved report is cached. Calling run_hirs_report will clear the
    latest report from the cache.
    """
    global CACHED_XML_REPORT
    if CACHED_XML_REPORT:
        logging.info("found cached XML report")
        return CACHED_XML_REPORT

    # BUG FIX: the adjacent string literals previously concatenated to
    # "...fromthe server" (missing space between "from" and "the").
    logging.info("cached XML report not found, retrieving latest report from "
                 "the server")

    latest_report_id = self.get_latest_report_summary()['report']['id']
    logging.info("requesting raw report")

    request_result = self.request("get", "portal/report/xml/raw?uuid=" + latest_report_id, operation="get latest report")
    CACHED_XML_REPORT = request_result.text
    return CACHED_XML_REPORT
|
||||
|
||||
def get_alert_count_from_latest_report(self):
    """ Retrieves the alert count from the latest report. """
    return self.get_alerts_from_latest_report()['recordsTotal']

def get_alerts_from_latest_report(self):
    """ Retrieves the alert list from the latest report. """
    report_id = self.get_latest_report_summary()['report']['id']
    response = self.request("get", "portal/alerts/list?report=" + report_id)
    return response.json()
|
||||
|
||||
def start_on_demand(self, group_name="Default%20Group"):
    """Kick off an on-demand appraisal for the (already URL-encoded) group."""
    self.request("get", "portal/on-demand/group/" + group_name)
|
||||
|
||||
def get_latest_report_summary(self):
    """Return the most recent report summary from the Portal, or None if
    no reports exist."""
    summaries = self.request("get", "portal/report/list").json()['data']
    if not summaries:
        return None
    return max(summaries, key=lambda summary: summary['timestamp'])
|
||||
|
||||
def get_devices(self):
    """Return the device listing from the Portal."""
    return self.request("get", "portal/devices/list").json()
|
||||
|
||||
def report_contains_ima_record(self, filename, sha_hash, report_id):
    """Return True if the report with the given id has an IMA record whose
    hash matches sha_hash and whose path contains filename (works for a
    full or partial path)."""
    logging.info("checking if report with ID {} contains file {} with hash {}".format(
        report_id, filename, sha_hash))
    ima_records = self.request(
        "get", "portal/report/list/imaRecords",
        params={'scope': 'REPORT', 'id': report_id}).json()['data']

    def matches(record):
        # exact hash match; filename may be any substring of the full path
        return record['hash']['digestString'] == sha_hash and filename in record['path']

    return any(matches(record) for record in ima_records)
|
||||
|
||||
def upload_payload(self, payload):
    """Serialize payload to a temporary JSON file, upload it as a policy,
    and return the policy name (payload["name"]).

    Bug fixes: file handles are now closed via `with` even when the upload
    request raises, and the temp file is removed in a finally block; the
    previously unused `response` variable is gone.
    """
    json_path = "tmp.json"
    with open(json_path, 'w') as json_file:
        json_file.write(json.dumps(payload))
    try:
        logging.debug("uploading policy:\n{0}".format(pprint.pformat(payload)))
        with open(json_path, 'rb') as upload_file:
            self.request("post", "portal/policies/import",
                         files={'file': upload_file}, operation="upload policy")
    finally:
        os.remove(json_path)
    return payload["name"]
|
||||
|
||||
def add_ima_policy(self, required_set=None, whitelist=None, blacklist=None,
                   ignore=None, unknown_fail="false",
                   delta_reports_enabled="false", policy_name_prefix=""):
    """Create and upload an IMA policy built from the given baselines.

    Each of required_set/whitelist/blacklist/ignore is a single baseline
    (or None to omit it). Returns the uploaded policy's name.
    """
    policy_name = "{0}_IMA_systems_test_policy_{1}".format(
        policy_name_prefix, get_current_timestamp())
    payload = {"name": policy_name,
               "description": "IMA policy for systems testing",
               "type": "IMA"}

    # each optional baseline becomes a one-element list, or stays empty
    def as_list(baseline):
        return [] if baseline is None else [baseline]

    payload.update({
        "deltaReportEnable": delta_reports_enabled,
        "failOnUnknowns": unknown_fail,
        "validatePcr": "false",
        "checkSubsequentBaselines": "true",
        "partialPathEnable": "true",
        "required": as_list(required_set),
        "whitelist": as_list(whitelist),
        "blacklist": as_list(blacklist),
        "ignoreSet": as_list(ignore),
    })
    return self.upload_payload(payload)
|
||||
|
||||
def add_tpm_wl_policy(self, baseline, policy_name_prefix=""):
    """Create and upload a TPM white-list policy containing `baseline`;
    return the policy name."""
    policy_name = "{0}_TPM_systems_test_wl_policy_{1}".format(
        policy_name_prefix, get_current_timestamp())
    payload = {"name": policy_name,
               "description": "TPM white list policy for systems testing",
               "type": "TPM",
               "appraiserPcrMask": 0xffffff,
               "reportPcrMask": 0xffffff,
               "appraiseFullReport": "true",
               "validateSignature": "true",
               "white-list-baselines": [baseline]}
    return self.upload_payload(payload)
|
||||
|
||||
def add_tpm_bl_policy(self, baseline, policy_name_prefix=""):
    """Create and upload a TPM black-list policy containing `baseline`;
    return the policy name."""
    policy_name = "{0}_TPM_systems_test_bl_policy_{1}".format(
        policy_name_prefix, get_current_timestamp())
    payload = {"name": policy_name,
               "description": "TPM black list policy for systems testing",
               "type": "TPM",
               "appraiserPcrMask": 0xffffff,
               "reportPcrMask": 0xffffff,
               "appraiseFullReport": "true",
               "validateSignature": "true",
               "black-list-baselines": [baseline]}
    return self.upload_payload(payload)
|
||||
|
||||
def add_to_ima_baseline(self, baseline_name, file_path, file_hash):
    """Append one (path, hash) record to the named IMA baseline."""
    record = {'name': baseline_name, 'path': file_path, 'hash': file_hash}
    self.request("post", "portal/baselines/record/ima/add", data=record,
                 operation="add to IMA baseline")
|
||||
|
||||
def upload_csv_baseline(self, baseline_path, appraiser_type):
    """Upload a CSV baseline file for the given appraiser type.

    The baseline is named after the file (extension stripped) plus a
    time-of-day suffix so repeated uploads do not collide; the local CSV
    file is removed afterwards. Returns the generated baseline name.

    Bug fixes: the old code tested `request_result` without ever assigning
    it (guaranteed NameError) and compared the response object to the
    integer 200; it also leaked the file handle and shelled out to `rm`.
    """
    current_time = datetime.datetime.now()
    baseline_name = (baseline_path.split('.')[0] + '_' +
                     str(current_time.hour) + '-' +
                     str(current_time.minute) + '-' +
                     str(current_time.second))
    try:
        with open(baseline_path, 'rb') as baseline_file:
            request_result = self.request(
                "post", "uploadImaCsv",
                data={'baselineName': baseline_name,
                      'optionsRadios': appraiser_type},
                files={'file': baseline_file},
                operation="upload baseline")
        if request_result.status_code != 200:
            logging.error("upload baseline return code: {0}, response text:\n"
                          "{1}".format(request_result.status_code, request_result.text))
    finally:
        os.remove(baseline_path)
    return baseline_name
|
||||
|
||||
def configure_yum_repository(self, baseline_name, base_url):
    """Create a Yum repository, point it at base_url, trigger an update
    job, and poll until the job completes.

    Raises RuntimeError after a 4 minute timeout. (Fix: this docstring was
    previously a no-op bare string *above* the def.)
    """
    self.request("post", "portal/repository/create",
                 params={'name': baseline_name, 'type': 'Yum'},
                 operation="create Yum repository")
    self.request("post", "portal/repository/update/url",
                 params={'name': baseline_name, 'baseUrl': base_url},
                 operation="set URL of Yum repository")
    self.request("post", "portal/repository/job/trigger",
                 params={'name': baseline_name},
                 operation="update Yum repository")

    # poll until the update job reports it is no longer running
    max_wait_time_seconds = 240
    sleep_time_seconds = 5
    waited_seconds = 0
    while True:
        time.sleep(sleep_time_seconds)
        waited_seconds += sleep_time_seconds
        if waited_seconds >= max_wait_time_seconds:
            msg = "Timeout waiting for repository update: {0} seconds".format(max_wait_time_seconds)
            logging.error(msg)
            raise RuntimeError(msg)
        status = self.request("get", "portal/repository/job/check",
                              params={'name': baseline_name},
                              operation="check status of repo update job")
        if not json.loads(status.text)['jobCurrentlyRunning']:
            break
|
||||
|
||||
def create_broad_ima_baseline(self, baseline_name, repository_name):
    """Create a BroadRepoImaBaseline, attach the named repository, and
    populate the baseline from that repository's contents.

    (Fix: this docstring was previously a no-op bare string above the def.)
    """
    self.request("post", "portal/baselines/create",
                 params={'name': baseline_name, 'type': 'broad'},
                 operation="create broad baseline")
    self.request("post", "portal/baselines/update/repositories",
                 params={'name': baseline_name, 'repositories': [repository_name]},
                 operation="add repository to broad baseline")
    self.request("post", "portal/baselines/triggerupdate",
                 params={'name': baseline_name},
                 operation="update broad repository from its repository")
|
||||
|
||||
def create_policy(self, name, policy_type):
    """Create a new, empty Policy of the given type via the Portal.

    (Fix: this docstring was previously a no-op bare string above the def.)
    """
    self.request("post", "portal/policies/create",
                 params={'name': name, 'type': policy_type},
                 operation="create new policy")
|
||||
|
||||
def set_partial_paths_for_ima_policy(self, policy_name, enabled):
    """Enable or disable partial path matching on an IMA policy.

    (Fix: this docstring was previously a no-op bare string above the def.)
    """
    checked = 'checked' if enabled else 'unchecked'
    self.request("post", "portal/policies/update",
                 params={'name': policy_name, 'partial': checked},
                 operation="update policy's partial path setting")
|
||||
|
||||
def set_kernel_setting(self, policy_name, kernel_detect_enabled,
                       kernel_alert_enabled,
                       kernel_alert_severity="UNSPECIFIED"):
    """Configure kernel detection and alerting on a TPM policy.

    Booleans are converted to the 'true'/'false' strings the Portal
    expects. (Fix: the docstring was previously a no-op bare string above
    the def.)
    """
    params = {
        'name': policy_name,
        'kernelDetectToggle': 'true' if kernel_detect_enabled else 'false',
        'kernelAlertToggle': 'true' if kernel_alert_enabled else 'false',
        'kernelAlertSeverity': kernel_alert_severity,
    }
    self.request("post", "portal/policies/update/editKernelDetectSettings",
                 params=params,
                 operation="update policy's kernel detection setting")
|
||||
|
||||
def add_baseline_to_required_sets(self, policy_name, baseline_name):
    """Add the named baseline to the policy's required sets.

    (Fix: the old no-op comment string above this function claimed it
    "creates a new Policy" — a copy/paste error from create_policy.)
    """
    self.request("post", "portal/policies/update",
                 params={'name': policy_name, 'required': [baseline_name]},
                 operation="add baseline to required sets")
|
||||
|
||||
def get_alerts(self):
    """Return the full alert listing from the Portal."""
    return self.request("get", "portal/alerts/list").json()
|
||||
|
||||
class AttestationCAPortal(object):
    """Thin REST client for the Attestation CA Portal."""

    def __init__(self, hirs_server_url):
        self.server_url = hirs_server_url

    def request(self, method, path, params={}, data={}, files={},
                expected_status_codes=[200], operation=None, verify=False):
        """Forward an HTTP request to the portal via web_request."""
        return web_request(self.server_url, method, path, params, data, files,
                           expected_status_codes, operation, verify)

    def check_is_online(self):
        """Probe the portal by listing platform credentials."""
        return self.request("get", "portal/certificate-request/platform-credentials/list").json()

    def get_supply_chain_validation_summaries(self):
        """List supply chain validation reports."""
        return self.request("get", "portal/validation-reports/list").json()

    def disable_supply_chain_validations(self):
        """Uncheck EC, PC, and PC-attribute validation policies.

        The initial POST goes through, but the server's redirect then
        yields a 404 (or, apparently, a 200 on centos7), so both status
        codes are accepted.
        """
        self.request("post", "portal/policy/update-ec-validation",
                     expected_status_codes=[404, 200], params={'ecValidate': "unchecked",})
        self.request("post", "portal/policy/update-pc-validation",
                     expected_status_codes=[404, 200], params={'pcValidate': 'unchecked'})
        self.request("post", "portal/policy/update-pc-attribute-validation",
                     expected_status_codes=[404, 200], params={'pcAttributeValidate': 'unchecked'})

    def enable_supply_chain_validations(self):
        """Check EC, PC, and PC-attribute validation policies.

        See disable_supply_chain_validations for the 404/200 redirect quirk.
        """
        self.request("post", "portal/policy/update-ec-validation",
                     expected_status_codes=[404, 200], params={'ecValidate': "checked",})
        self.request("post", "portal/policy/update-pc-validation",
                     expected_status_codes=[404, 200], params={'pcValidate': 'checked'})
        self.request("post", "portal/policy/update-pc-attribute-validation",
                     expected_status_codes=[404, 200], params={'pcAttributeValidate': 'checked'})

    def enable_ec_validation(self):
        """Check only the EC validation policy (404/200 redirect quirk)."""
        self.request("post", "portal/policy/update-ec-validation",
                     expected_status_codes=[404, 200], params={'ecValidate': "checked",})

    def get_devices(self):
        """Get devices from ACA portal."""
        return self.request("get", "portal/devices/list").json()

    def get_ek_certs(self):
        """Get EK certs from ACA portal."""
        return self.request("get", "portal/certificate-request/endorsement-key-credentials/list").json()

    def get_pk_certs(self):
        """Get PK certs from ACA portal."""
        return self.request("get", "portal/certificate-request/platform-credentials/list").json()

    def get_trust_chains(self):
        """Get trust chains from ACA portal."""
        return self.request("get", "portal/certificate-request/trust-chain/list").json()

    def upload_ca_cert(self, ca_cert_file):
        """Upload a CA certificate to the trust chain store.

        Bug fix: the file handle previously leaked; it is now closed via
        a `with` block.
        """
        with open(ca_cert_file, 'rb') as cert:
            self.request("post", "portal/certificate-request/trust-chain/upload",
                         files={'file': cert}, operation="upload CA cert")

    def upload_pk_cert(self, pk_cert_file):
        """Upload a platform credential certificate.

        Bug fix: the file handle previously leaked; it is now closed via
        a `with` block.
        """
        with open(pk_cert_file, 'rb') as cert:
            self.request("post", "portal/certificate-request/platform-credentials/upload",
                         files={'file': cert}, operation="upload PK cert")
|
||||
|
||||
def web_request(server_url, method, path, params={}, data={}, files={},
                expected_status_codes=[200], operation=None, verify=False):
    """Issue an HTTP GET or POST via `requests` and validate the result.

    Raises ValueError for unsupported methods, and RuntimeError (from
    check_request_response) when the status code is unexpected. Returns
    the requests response object. (Idiom fix: `operation == None` replaced
    with `operation is None`.)
    """
    url = server_url + path
    if method not in ['get', 'post']:
        raise ValueError("Method " + method + " not valid.")
    request_response = getattr(requests, method)(url, params=params, data=data,
                                                 files=files, verify=verify)

    request_msg = method + " " + url
    if operation is None:
        operation = request_msg
    else:
        operation += " (" + request_msg + ")"

    check_request_response(expected_status_codes, request_response, operation)
    return request_response
|
||||
|
||||
def check_request_response(expected_status_codes, request_result, operation):
    """Validate a requests response against the expected status codes.

    Logs and raises RuntimeError when the status code is not one of
    expected_status_codes; returns None otherwise. (Doc fix: the old
    comment claimed this returned True/False, which did not match the
    code — it has always raised on mismatch.)
    """
    if request_result.status_code not in expected_status_codes:
        message = "Unable to " + operation + ": {0}, response text:\n{1}".format(
            request_result.status_code, request_result.text)
        logging.error(message)
        raise RuntimeError(message)
|
||||
|
||||
def collectors(collectors, collector_list):
    """Decorator factory: pass the test through unchanged when every
    collector it needs is enabled, otherwise return a unittest.skip
    decorator."""
    needed = set(collectors)
    enabled = set(collector_list)
    if needed.issubset(enabled):
        return lambda func: func
    return unittest.skip("{0} collector isn't enabled".format(needed.difference(enabled)))
|
||||
|
||||
def send_command(full_command, accept_nonzero_status=False):
    """Run a command (tokenized with shlex, no shell) and return its
    stdout as bytes.

    Raises RuntimeError on a nonzero exit status unless
    accept_nonzero_status is True.
    """
    proc = subprocess.Popen(shlex.split(full_command),
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if proc.returncode != 0 and not accept_nonzero_status:
        logging.error("Command: " + full_command + " exited with return code " + str(proc.returncode))
        logging.error(str(out))
        logging.error(str(err))
        raise RuntimeError("Command exited with a nonzero status, out:\n" + str(out) + "\nerr:\n" + str(err))
    return out
|
||||
|
||||
def send_command_sha1sum(full_command, accept_nonzero_status=False):
    """Run a command and pipe its stdout through `head -c40`, returning
    the first 40 bytes (the length of a sha1 hex digest)."""
    producer = subprocess.Popen(shlex.split(full_command),
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    trimmer = subprocess.Popen(['head', '-c40'], stdin=producer.stdout,
                               stdout=subprocess.PIPE)
    producer.stdout.close()
    out, err = trimmer.communicate()
    if trimmer.returncode != 0 and not accept_nonzero_status:
        logging.error("Command: " + full_command + " exited with return code " + str(trimmer.returncode))
        logging.error(str(out))
        logging.error(str(err))
        raise RuntimeError("Command exited with a nonzero status, out:\n" + str(out) + "\nerr:\n" + str(err))
    return out
|
||||
|
||||
def run_hirs_report(client_hostname):
    """Run a hirs report for the client and return True when the output
    indicates appraisal success.

    Clears the cached XML report as a side effect. Bug fix: this used to
    call run_hirs_report_and_clear_cache_V2, which is not defined anywhere
    in this module (NameError at runtime).
    """
    client_out = run_hirs_report_and_clear_cache(client_hostname)
    if APPRAISAL_SUCCESS_MESSAGE in client_out:
        logging.info("Report appraisal passed")
        return True
    else:
        logging.info("Report appraisal unsuccessful: " + client_out)
        return False
|
||||
|
||||
def run_hirs_report_and_clear_cache(client_hostname):
    """Run `sudo hirs report` for the client, clear the cached XML
    report, and return the raw command output."""
    global CACHED_XML_REPORT
    logging.info("running hirs report over ssh on {0}".format(client_hostname))
    client_out = send_command("sudo hirs report", accept_nonzero_status=True)
    if CACHED_XML_REPORT:
        logging.info("clearing cached XML report")
        CACHED_XML_REPORT = None
    return client_out
|
||||
|
||||
def run_hirs_provisioner_tpm_1_2(client_hostname):
    """Run the TPM 1.2 hirs provisioner and return its output."""
    logging.info("running hirs provisioner TPM 1.2 on {0}".format(client_hostname))
    return send_command("hirs-provisioner provision")
|
||||
|
||||
def run_hirs_provisioner_tpm_2_0(client_hostname):
    """Run the TPM 2.0 hirs provisioner and return its output."""
    logging.info("running hirs provisioner TPM 2.0 on {0}".format(client_hostname))
    return send_command("hirs-provisioner-tpm2 provision")
|
||||
|
||||
def parse_xml_with_stripped_namespaces(raw_xml_string):
    """Parse XML text into an element tree whose tag and attribute names
    have had their namespace prefixes removed (namespaces conflict with
    the recursive .findall searches used elsewhere)."""
    parser = ET.iterparse(StringIO(raw_xml_string))
    for _, element in parser:
        if '}' in element.tag:
            element.tag = element.tag.split('}', 1)[1]
        for attr_name in list(element.attrib.keys()):
            if '}' in attr_name:
                stripped = attr_name.split('}', 1)[1]
                element.attrib[stripped] = element.attrib[attr_name]
                del element.attrib[attr_name]
    return parser.root
|
||||
|
||||
|
||||
def get_all_nodes_recursively(tree_node, node_name):
    """Return every descendant element of tree_node with the given tag."""
    return tree_node.findall('.//' + node_name)
|
||||
|
||||
def touch_random_file_and_remove(client_hostname):
    """Write a random string to a random file in /tmp, read it as root,
    hash it, then delete it; return (filename, sha1_hex).

    Bug fix: `command_output` was read without ever being assigned — the
    combined command was built but never executed. It is now run through
    send_command.
    """
    random_number = str(int(random.random() * 100000))
    filename = "/tmp/on_demand_test_file{}.txt".format(random_number)

    echo_command = "echo {} > {}".format(random_number, filename)
    cat_command = "sudo cat {}".format(filename)
    sha_command = "sha1sum {}".format(filename)
    rm_command = "rm {}".format(filename)

    combined_command = "{};{};{};{}".format(echo_command, cat_command, sha_command, rm_command)
    # NOTE(review): send_command tokenizes with shlex and does not use a
    # shell, so the ';' and '>' operators here may need a shell=True
    # variant — confirm against the original callers of this helper.
    command_output = send_command(combined_command)
    sha_hash = command_output.split()[1]

    return (filename, sha_hash)
|
||||
|
||||
def make_simple_ima_baseline():
    """Build a small two-record IMA baseline appropriate for CLIENT_OS.

    Raises ValueError for unsupported client OS types. (Bug fix: the old
    code only logged an error for unknown OS values and then crashed with
    a NameError on the undefined `records` variable.)
    """
    timestamp = get_current_timestamp()

    if CLIENT_OS == "centos6":
        records = [{"path": "/lib/udev/console_init",
                    "hash": send_command_sha1sum("sha1sum /lib/udev/console_init")},
                   {"path": "/bin/mknod",
                    "hash": send_command_sha1sum("sha1sum /bin/mknod")}]
    elif CLIENT_OS == "centos7":
        records = [{"path": "/lib/systemd/rhel-readonly",
                    "hash": send_command_sha1sum("sha1sum /lib/systemd/rhel-readonly")},
                   {"path": "/bin/sort",
                    "hash": send_command_sha1sum("sha1sum /bin/sort")}]
    elif CLIENT_OS == "ubuntu16":
        records = [{"path": "/lib/systemd/systemd-udevd",
                    "hash": send_command_sha1sum("sha1sum /lib/systemd/systemd-udevd")},
                   {"path": "/bin/udevadm",
                    "hash": send_command_sha1sum("sha1sum /bin/udevadm")}]
    else:
        logging.error("unsupported client os type: %s", CLIENT_OS)
        raise ValueError("unsupported client os type: {0}".format(CLIENT_OS))

    return {"name": "simple_ima_baseline_{0}".format(timestamp),
            "description": "a simple hard-coded ima baseline for systems testing",
            "records": records}
|
||||
|
||||
def make_baseline_from_xml(xml_report, appraiser_type):
    """Build a baseline dict for appraiser_type ("TPM" or "IMA") from the
    records found in an XML report."""
    timestamp = get_current_timestamp()
    baseline = {
        "name": "full_{0}_baseline_{1}".format(appraiser_type, timestamp),
        "description": "{0} baseline created by parsing an xml report and uploaded for systems testing".format(appraiser_type),
        "records": [],
    }
    tree = parse_xml_with_stripped_namespaces(xml_report)

    if appraiser_type == "TPM":
        for pcr_tag in get_all_nodes_recursively(tree, "PcrValue"):
            tpm_digest = get_all_nodes_recursively(pcr_tag, "digest")[0].text
            baseline["records"].append({
                "pcr": pcr_tag.attrib['PcrNumber'],
                # digests are base64 in the report; store them hex-encoded
                "hash": binascii.hexlify(binascii.a2b_base64(tpm_digest)),
            })
    if appraiser_type == "IMA":
        for ima_record in get_all_nodes_recursively(tree, "imaRecords"):
            ima_path = get_all_nodes_recursively(ima_record, "path")[0].text
            ima_digest = get_all_nodes_recursively(ima_record, "digest")[0].text
            baseline["records"].append({
                "path": ima_path,
                "hash": binascii.hexlify(binascii.a2b_base64(ima_digest)),
            })
    logging.info("created {0} baseline from xml with {1} records".format(
        appraiser_type, str(len(baseline["records"]))))
    return baseline
|
||||
|
||||
def make_simple_ima_blacklist_baseline():
    """Blacklist baseline holding a single kernel-module path record."""
    return {
        "name": "simple_ima_blacklist_baseline_{0}".format(get_current_timestamp()),
        "description": "a simple blacklist ima baseline for systems testing",
        "records": [{"path": "/boot/usb-storage-foo.ko"}],
    }
|
||||
|
||||
def make_simple_ima_blacklist_baseline_with_hash():
    """Blacklist baseline holding a single hash-only record."""
    return {
        "name": "simple_ima_blacklist_baseline_{0}".format(get_current_timestamp()),
        "description": "a simple blacklist ima baseline for systems testing",
        "records": [{"hash": USB_STORAGE_FILE_HASH}],
    }
|
||||
|
||||
def make_simple_ima_blacklist_baseline_with_file_and_hash():
    """Blacklist baseline holding one record with both path and hash."""
    return {
        "name": "simple_ima_blacklist_baseline_{0}".format(get_current_timestamp()),
        "description": "a simple blacklist ima baseline for systems testing",
        "records": [{"path": "usb-storage_2.ko",
                     "hash": USB_STORAGE_FILE_HASH}],
    }
|
||||
|
||||
def make_simple_ima_blacklist_baseline_with_updated_file_and_hash():
    """Blacklist baseline with an updated path/hash pair."""
    return {
        "name": "simple_ima_blacklist_baseline_{0}".format(get_current_timestamp()),
        "description": "a simple blacklist ima baseline for systems testing",
        "records": [{"path": "test-file",
                     "hash": USB_STORAGE_FILE_HASH_2}],
    }
|
||||
|
||||
def get_random_pcr_hex_value():
    """Return 40 random hex characters (a sha1-sized value) usable as a
    TPM PCR value.

    (Doc fix: the old docstring claimed this combined two UUIDs; it reads
    20 bytes from os.urandom. NOTE(review): on Python 3, str() of the
    b2a_hex bytes yields a "b'...'"-wrapped string — confirm consumers
    tolerate that.)
    """
    return str(binascii.b2a_hex(os.urandom(20)))
|
||||
|
||||
def get_current_timestamp():
    """Return the current local time formatted as HH-MM-SS."""
    return datetime.datetime.now().strftime('%H-%M-%S')
|
||||
|
||||
def is_ubuntu_client(client_os):
    """True when client_os names a supported Ubuntu release."""
    return client_os in ["ubuntu14", "ubuntu16"]
|
||||
|
||||
def is_tpm_1_2(tpm_version):
    """True when the version string denotes TPM 1.2."""
    return tpm_version in ["1.2"]
|
||||
|
||||
def is_tpm_2_0(tpm_version):
    """True when the version string denotes TPM 2.0 ("2.0" or "2")."""
    return tpm_version in ["2.0", "2"]
|
@ -1,14 +0,0 @@
|
||||
#!/bin/bash
# Wrapper: run the core systems test against a CentOS 7 client with TPM 1.2.

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

export CLIENT_OS=centos7
export CLIENT_HOSTNAME=hirs-client-$CLIENT_OS-tpm1_2
export SERVER_OS=$CLIENT_OS
export SERVER_HOSTNAME=hirs-appraiser-$SERVER_OS
export ENABLED_COLLECTORS=TPM
export TPM_VERSION=1.2

$SCRIPT_DIR/systems-test.core.sh
|
@ -1,14 +0,0 @@
|
||||
#!/bin/bash
# Wrapper: core systems test, CentOS 7 client, TPM 2.0, bad base/delta set.

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

export CLIENT_OS=centos7
export CLIENT_HOSTNAME=hirs-client-$CLIENT_OS-tpm2
export SERVER_OS=$CLIENT_OS
export SERVER_HOSTNAME=hirs-appraiser-$SERVER_OS
export ENABLED_COLLECTORS=BASE_DELTA_BAD
export TPM_VERSION=2.0

$SCRIPT_DIR/systems-test.core.sh
|
@ -1,14 +0,0 @@
|
||||
#!/bin/bash
# Wrapper: core systems test, CentOS 7 client, TPM 2.0, good base/delta set.

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

export CLIENT_OS=centos7
export CLIENT_HOSTNAME=hirs-client-$CLIENT_OS-tpm2
export SERVER_OS=$CLIENT_OS
export SERVER_HOSTNAME=hirs-appraiser-$SERVER_OS
export ENABLED_COLLECTORS=BASE_DELTA_GOOD
export TPM_VERSION=2.0

$SCRIPT_DIR/systems-test.core.sh
|
@ -1,14 +0,0 @@
|
||||
#!/bin/bash
# Wrapper: run the core systems test against a CentOS 7 client with TPM 2.0.

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

export CLIENT_OS=centos7
export CLIENT_HOSTNAME=hirs-client-$CLIENT_OS-tpm2
export SERVER_OS=$CLIENT_OS
export SERVER_HOSTNAME=hirs-appraiser-$SERVER_OS
export ENABLED_COLLECTORS=TPM
export TPM_VERSION=2.0

$SCRIPT_DIR/systems-test.core.sh
|
@ -1,28 +0,0 @@
|
||||
#!/bin/bash
# Core systems-test driver: runs system_test.py, teeing output to a log
# file, and exits 0/1 based on the python suite's exit status.

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

TEST_LOG=$SCRIPT_DIR/test_logs/system_test_$CLIENT_OS.log
LOG_LEVEL=logging.INFO

export CLIENT_HOSTNAME CLIENT_OS TPM_VERSION ENABLED_COLLECTORS TEST_LOG LOG_LEVEL

# Prepare log file directory
rm -rf "$SCRIPT_DIR/test_logs"
mkdir -p "$SCRIPT_DIR/test_logs"

# Run system tests
echo "===========Running systems tests on ${SERVER_HOSTNAME} and ${CLIENT_HOSTNAME}==========="
TEST_OUTPUT=$SCRIPT_DIR/test_logs/test_output$$.txt
python "$SCRIPT_DIR/system_test.py" 2>&1 | tee "$TEST_OUTPUT"
# Fix: PIPESTATUS is an array — take element 0 (python's exit status)
# explicitly instead of relying on bare-array expansion.
SYSTEM_TEST_EXIT_CODE=${PIPESTATUS[0]}

# Check result
if [[ $SYSTEM_TEST_EXIT_CODE == 0 ]]
then
    echo "SUCCESS: System tests TPM $TPM_VERSION passed"
    exit 0
fi

echo "ERROR: System tests TPM $TPM_VERSION failed"
exit 1
|
46
.github/workflows/system_test.yml
vendored
Normal file
46
.github/workflows/system_test.yml
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
# Sets up and runs HIRS System tests

name: HIRS System Tests
on:  # yamllint disable-line rule:truthy
  push:
  workflow_dispatch:
env:
  TEST_STATUS: "0"
jobs:
  DockerTests:
    runs-on: ubuntu-latest
    outputs:
      test-result: ${{ steps.set_outputs.outputs.test-result }}
    permissions:
      contents: read
      packages: write
    steps:
      - uses: actions/checkout@v2
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          java-version: '8'
          distribution: 'adopt'
          server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
          settings-path: ${{ github.workspace }} # location for the settings.xml file
      - name: ACA TPM2 Tests
        continue-on-error: true
        shell: bash
        run: |
          sudo apt-get install -y curl
          echo ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} | docker login -u ${{ secrets.DOCKER_HUB_USERNAME }} --password-stdin
          bash .ci/system-tests/run-system-tests.sh
      - name: Archive System Test Log files
        uses: actions/upload-artifact@v2
        with:
          name: System_Test_Log_Files
          path: logs/
          if-no-files-found: error
      - name: Check System Test results
        if: success() || failure()
        # Fix: quote the expansion so the [ ] test doesn't become a syntax
        # error if TEST_STATUS is ever unset or empty.
        run: |
          if [ "${TEST_STATUS}" == "0" ]; then
            exit 0;
          else
            exit 1;
          fi
|
70
.travis.yml
70
.travis.yml
@ -1,70 +0,0 @@
|
||||
# NOTE: if you are editing this, try using the yamllint tool to check your work.
# yamllint disable rule:line-length
---
os:
  - linux

sudo: true

language: java

env:
  - SUBPROJECT=HIRS_Utils
  - SUBPROJECT=HIRS_Provisioner
  - SUBPROJECT=HIRS_ProvisionerTPM2
  - SUBPROJECT=HIRS_Structs
  - SUBPROJECT=HIRS_AttestationCA
  - SUBPROJECT=HIRS_AttestationCAPortal
  - SUBPROJECT=TPM_Utils
  - SUBPROJECT=tpm_module
  - TCG_RIM_TOOL=tools/tcg_rim_tool

services:
  - docker

before_cache:
  - rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
  - rm -fr $HOME/.gradle/caches/*/plugin-resolution/
cache:
  directories:
    - $HOME/.gradle/caches/
    - $HOME/.gradle/wrapper/

install: true

script:
  - echo $DOCKER_PWD | docker login -u "$DOCKER_USER" --password-stdin
  - docker run --rm -v $(pwd):/HIRS hirs/hirs-ci:centos7 /bin/bash -c "cd /HIRS; ./gradlew :$SUBPROJECT:build -x test :$TCG_RIM_TOOL:build"

jobs:
  include:
    - stage: Packaging and System Tests
      script:
        - echo $DOCKER_PWD | docker login -u "$DOCKER_USER" --password-stdin
        - docker run --rm -v $(pwd):/HIRS hirs/hirs-ci:ubuntu18 /bin/bash -c "cd /HIRS; ./package/package.ubuntu.sh"
      env: null
      name: "Package Ubuntu"
    - stage: Packaging and System Tests
      script:
        - echo $DOCKER_PWD | docker login -u "$DOCKER_USER" --password-stdin
        - .ci/system-tests/./run-system-tests.sh
      env: null
      name: "System Tests TPM 1.2"
    - stage: Packaging and System Tests
      script:
        - echo $DOCKER_PWD | docker login -u "$DOCKER_USER" --password-stdin
        - .ci/system-tests/./run-system-tests-tpm2.sh
      env: null
      name: "System Tests TPM 2.0"
    - stage: Packaging and System Tests
      script:
        - echo $DOCKER_PWD | docker login -u "$DOCKER_USER" --password-stdin
        - .ci/system-tests/./run-system-tests-tpm2-base-delta-bad.sh
      env: null
      name: "System Tests TPM 2.0 Base/Delta(Bad)"
    - stage: Packaging and System Tests
      script:
        - echo $DOCKER_PWD | docker login -u "$DOCKER_USER" --password-stdin
        - .ci/system-tests/./run-system-tests-tpm2-base-delta-good.sh
      env: null
      name: "System Tests TPM 2.0 Base/Delta(Good)"
|
Loading…
x
Reference in New Issue
Block a user