Mirror of https://github.com/aquasecurity/trivy.git
refactor(cyclonedx): implement json.Unmarshaler (#2662)
* refactor(cyclonedx): implement json.Unmarshaler
* fix: use pointer
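
The refactor drops Trivy's own sbom.Unmarshaler interface in favour of the standard library's json.Unmarshaler: cyclonedx.CycloneDX now embeds *sbom.SBOM and implements UnmarshalJSON, so a plain encoding/json decoder can populate the SBOM directly, and the new Artifact.Decode helper in the diff below does exactly that. A minimal usage sketch, assuming the Trivy module's import paths at this commit and a hypothetical input file testdata/bom.json:

package main

import (
    "encoding/json"
    "fmt"
    "log"
    "os"

    "github.com/aquasecurity/trivy/pkg/sbom"
    "github.com/aquasecurity/trivy/pkg/sbom/cyclonedx"
)

func main() {
    // Open a CycloneDX JSON SBOM (hypothetical path).
    f, err := os.Open("testdata/bom.json")
    if err != nil {
        log.Fatal(err)
    }
    defer f.Close()

    // UnmarshalJSON on cyclonedx.CycloneDX fills the embedded *sbom.SBOM,
    // so the standard encoding/json decoder is all that is needed.
    var bom sbom.SBOM
    if err := json.NewDecoder(f).Decode(&cyclonedx.CycloneDX{SBOM: &bom}); err != nil {
        log.Fatalf("decode error: %s", err)
    }

    fmt.Printf("packages: %d, applications: %d\n", len(bom.Packages), len(bom.Applications))
}
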
@@ -59,18 +59,11 @@ func (a Artifact) Inspect(_ context.Context) (types.ArtifactReference, error) {
         return types.ArtifactReference{}, xerrors.Errorf("seek error: %w", err)
     }
 
-    var unmarshaler sbom.Unmarshaler
-    switch format {
-    case sbom.FormatCycloneDXJSON:
-        unmarshaler = cyclonedx.NewJSONUnmarshaler()
-    default:
-        return types.ArtifactReference{}, xerrors.Errorf("%s scanning is not yet supported", format)
-    }
-
-    bom, err := unmarshaler.Unmarshal(f)
+    bom, err := a.Decode(f, format)
     if err != nil {
-        return types.ArtifactReference{}, xerrors.Errorf("failed to unmarshal: %w", err)
+        return types.ArtifactReference{}, xerrors.Errorf("SBOM decode error: %w", err)
     }
 
     blobInfo := types.BlobInfo{
         SchemaVersion: types.BlobJSONSchemaVersion,
         OS:            bom.OS,
@@ -104,6 +97,30 @@ func (a Artifact) Inspect(_ context.Context) (types.ArtifactReference, error) {
     }, nil
 }
 
+func (a Artifact) Decode(f io.Reader, format sbom.Format) (sbom.SBOM, error) {
+    var (
+        v       interface{}
+        bom     sbom.SBOM
+        decoder interface{ Decode(any) error }
+    )
+
+    switch format {
+    case sbom.FormatCycloneDXJSON:
+        v = &cyclonedx.CycloneDX{SBOM: &bom}
+        decoder = json.NewDecoder(f)
+    default:
+        return sbom.SBOM{}, xerrors.Errorf("%s scanning is not yet supported", format)
+
+    }
+
+    // Decode a file content into sbom.SBOM
+    if err := decoder.Decode(v); err != nil {
+        return sbom.SBOM{}, xerrors.Errorf("failed to decode: %w", err)
+    }
+
+    return bom, nil
+}
+
 func (a Artifact) Clean(reference types.ArtifactReference) error {
     return a.cache.DeleteBlobs(reference.BlobIDs)
 }
@@ -367,7 +367,7 @@ func (e *Marshaler) reportToCdxComponent(r types.Report) (*cdx.Component, error)
     return component, nil
 }
 
-func (e Marshaler) resultToCdxComponent(r types.Result, osFound *ftypes.OS) cdx.Component {
+func (e *Marshaler) resultToCdxComponent(r types.Result, osFound *ftypes.OS) cdx.Component {
     component := cdx.Component{
         Name: r.Target,
         Properties: &[]cdx.Property{
@@ -1,7 +1,7 @@
 package cyclonedx
 
 import (
-    "io"
+    "bytes"
     "sort"
     "strconv"
     "strings"
@@ -15,54 +15,46 @@ import (
     "github.com/aquasecurity/trivy/pkg/sbom"
 )
 
-type Unmarshaler struct {
-    format cdx.BOMFileFormat
+type CycloneDX struct {
+    *sbom.SBOM
 
     dependencies map[string][]string
     components   map[string]cdx.Component
 }
 
-func NewJSONUnmarshaler() sbom.Unmarshaler {
-    return &Unmarshaler{
-        format: cdx.BOMFileFormatJSON,
+func (c *CycloneDX) UnmarshalJSON(b []byte) error {
+    if c.SBOM == nil {
+        c.SBOM = &sbom.SBOM{}
     }
-}
-
-func (u *Unmarshaler) Unmarshal(r io.Reader) (sbom.SBOM, error) {
     bom := cdx.NewBOM()
-    decoder := cdx.NewBOMDecoder(r, u.format)
+    decoder := cdx.NewBOMDecoder(bytes.NewReader(b), cdx.BOMFileFormatJSON)
     if err := decoder.Decode(bom); err != nil {
-        return sbom.SBOM{}, xerrors.Errorf("CycloneDX decode error: %w", err)
+        return xerrors.Errorf("CycloneDX decode error: %w", err)
     }
 
-    u.dependencies = dependencyMap(bom.Dependencies)
-    u.components = componentMap(bom.Metadata, bom.Components)
+    c.dependencies = dependencyMap(bom.Dependencies)
+    c.components = componentMap(bom.Metadata, bom.Components)
 
-    var (
-        osInfo   *ftypes.OS
-        apps     []ftypes.Application
-        pkgInfos []ftypes.PackageInfo
-        seen     = make(map[string]struct{})
-    )
-    for bomRef := range u.dependencies {
-        component := u.components[bomRef]
+    var seen = make(map[string]struct{})
+    for bomRef := range c.dependencies {
+        component := c.components[bomRef]
         switch component.Type {
         case cdx.ComponentTypeOS: // OS info and OS packages
-            osInfo = toOS(component)
-            pkgInfo, err := u.parseOSPkgs(component, seen)
+            c.OS = toOS(component)
+            pkgInfo, err := c.parseOSPkgs(component, seen)
             if err != nil {
-                return sbom.SBOM{}, xerrors.Errorf("failed to parse os packages: %w", err)
+                return xerrors.Errorf("failed to parse os packages: %w", err)
             }
-            pkgInfos = append(pkgInfos, pkgInfo)
+            c.Packages = append(c.Packages, pkgInfo)
         case cdx.ComponentTypeApplication: // It would be a lock file in a CycloneDX report generated by Trivy
             if lookupProperty(component.Properties, PropertyType) == "" {
                 continue
             }
-            app, err := u.parseLangPkgs(component, seen)
+            app, err := c.parseLangPkgs(component, seen)
             if err != nil {
-                return sbom.SBOM{}, xerrors.Errorf("failed to parse language packages: %w", err)
+                return xerrors.Errorf("failed to parse language packages: %w", err)
             }
-            apps = append(apps, *app)
+            c.Applications = append(c.Applications, *app)
         case cdx.ComponentTypeLibrary:
             // It is an individual package not associated with any lock files and should be processed later.
             // e.g. .gemspec, .egg and .wheel
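
For context, the contract satisfied above is the standard library's json.Unmarshaler interface, shown below as defined in encoding/json; because cyclonedx-go's BOMDecoder consumes an io.Reader rather than a byte slice, UnmarshalJSON wraps its input with bytes.NewReader.

// From the Go standard library (encoding/json):
type Unmarshaler interface {
    UnmarshalJSON([]byte) error
}
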
@@ -71,7 +63,7 @@ func (u *Unmarshaler) Unmarshal(r io.Reader) (sbom.SBOM, error) {
     }
 
     var libComponents []cdx.Component
-    for ref, component := range u.components {
+    for ref, component := range c.components {
         if _, ok := seen[ref]; ok {
             continue
         }
@@ -82,15 +74,15 @@ func (u *Unmarshaler) Unmarshal(r io.Reader) (sbom.SBOM, error) {
 
     aggregatedApps, err := aggregateLangPkgs(libComponents)
     if err != nil {
-        return sbom.SBOM{}, xerrors.Errorf("failed to aggregate packages: %w", err)
+        return xerrors.Errorf("failed to aggregate packages: %w", err)
     }
-    apps = append(apps, aggregatedApps...)
+    c.Applications = append(c.Applications, aggregatedApps...)
 
-    sort.Slice(apps, func(i, j int) bool {
-        if apps[i].Type != apps[j].Type {
-            return apps[i].Type < apps[j].Type
+    sort.Slice(c.Applications, func(i, j int) bool {
+        if c.Applications[i].Type != c.Applications[j].Type {
+            return c.Applications[i].Type < c.Applications[j].Type
         }
-        return apps[i].FilePath < apps[j].FilePath
+        return c.Applications[i].FilePath < c.Applications[j].FilePath
     })
 
     var metadata ftypes.Metadata
@@ -102,29 +94,24 @@ func (u *Unmarshaler) Unmarshal(r io.Reader) (sbom.SBOM, error) {
     }
 
     var components []ftypes.Component
-    for _, c := range lo.FromPtr(bom.Components) {
-        components = append(components, toTrivyCdxComponent(c))
+    for _, component := range lo.FromPtr(bom.Components) {
+        components = append(components, toTrivyCdxComponent(component))
     }
 
-    return sbom.SBOM{
-        OS:           osInfo,
-        Packages:     pkgInfos,
-        Applications: apps,
-
-        // Keep the original SBOM
-        CycloneDX: &ftypes.CycloneDX{
-            BOMFormat:    bom.BOMFormat,
-            SpecVersion:  bom.SpecVersion,
-            SerialNumber: bom.SerialNumber,
-            Version:      bom.Version,
-            Metadata:     metadata,
-            Components:   components,
-        },
-    }, nil
+    // Keep the original SBOM
+    c.CycloneDX = &ftypes.CycloneDX{
+        BOMFormat:    bom.BOMFormat,
+        SpecVersion:  bom.SpecVersion,
+        SerialNumber: bom.SerialNumber,
+        Version:      bom.Version,
+        Metadata:     metadata,
+        Components:   components,
+    }
+    return nil
 }
 
-func (u *Unmarshaler) parseOSPkgs(component cdx.Component, seen map[string]struct{}) (ftypes.PackageInfo, error) {
-    components := u.walkDependencies(component.BOMRef)
+func (c *CycloneDX) parseOSPkgs(component cdx.Component, seen map[string]struct{}) (ftypes.PackageInfo, error) {
+    components := c.walkDependencies(component.BOMRef)
     pkgs, err := parsePkgs(components, seen)
     if err != nil {
         return ftypes.PackageInfo{}, xerrors.Errorf("failed to parse os package: %w", err)
@@ -135,8 +122,8 @@ func (u *Unmarshaler) parseOSPkgs(component cdx.Component, seen map[string]struc
     }, nil
 }
 
-func (u *Unmarshaler) parseLangPkgs(component cdx.Component, seen map[string]struct{}) (*ftypes.Application, error) {
-    components := u.walkDependencies(component.BOMRef)
+func (c *CycloneDX) parseLangPkgs(component cdx.Component, seen map[string]struct{}) (*ftypes.Application, error) {
+    components := c.walkDependencies(component.BOMRef)
     components = lo.UniqBy(components, func(c cdx.Component) string {
         return c.BOMRef
     })
@@ -175,10 +162,10 @@ func parsePkgs(components []cdx.Component, seen map[string]struct{}) ([]ftypes.P
 // - type: Application 3
 //   - type: Library D
 //     - type: Library E
-func (u *Unmarshaler) walkDependencies(rootRef string) []cdx.Component {
+func (c *CycloneDX) walkDependencies(rootRef string) []cdx.Component {
     var components []cdx.Component
-    for _, dep := range u.dependencies[rootRef] {
-        component, ok := u.components[dep]
+    for _, dep := range c.dependencies[rootRef] {
+        component, ok := c.components[dep]
         if !ok {
             continue
         }
@@ -188,7 +175,7 @@ func (u *Unmarshaler) walkDependencies(rootRef string) []cdx.Component {
             components = append(components, component)
         }
 
-        components = append(components, u.walkDependencies(dep)...)
+        components = append(components, c.walkDependencies(dep)...)
     }
     return components
 }
@@ -1,6 +1,7 @@
 package cyclonedx_test
 
 import (
+    "encoding/json"
     "os"
     "testing"
 
@@ -196,8 +197,8 @@ func TestUnmarshaler_Unmarshal(t *testing.T) {
             require.NoError(t, err)
             defer f.Close()
 
-            unmarshaler := cyclonedx.NewJSONUnmarshaler()
-            got, err := unmarshaler.Unmarshal(f)
+            var cdx cyclonedx.CycloneDX
+            err = json.NewDecoder(f).Decode(&cdx)
             if tt.wantErr != "" {
                 require.Error(t, err)
                 assert.Contains(t, err.Error(), tt.wantErr)
@@ -205,6 +206,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) {
             }
 
             // Not compare the CycloneDX field
+            got := *cdx.SBOM
             got.CycloneDX = nil
 
             require.NoError(t, err)
@@ -19,18 +19,14 @@ type SBOM struct {
     CycloneDX *types.CycloneDX
 }
 
-type Unmarshaler interface {
-    Unmarshal(io.Reader) (SBOM, error)
-}
-
 type Format string
 
 const (
-    FormatCycloneDXJSON = "cyclonedx-json"
-    FormatCycloneDXXML  = "cyclonedx-xml"
-    FormatSPDXJSON      = "spdx-json"
-    FormatSPDXXML       = "spdx-xml"
-    FormatUnknown       = "unknown"
+    FormatCycloneDXJSON Format = "cyclonedx-json"
+    FormatCycloneDXXML  Format = "cyclonedx-xml"
+    FormatSPDXJSON      Format = "spdx-json"
+    FormatSPDXXML       Format = "spdx-xml"
+    FormatUnknown       Format = "unknown"
 )
 
 func DetectFormat(r io.ReadSeeker) (Format, error) {