Update Database and Worker implementation for layer-wise feature

The feature extraction algorithm is changed to associate each detected feature
with the ancestry layer that introduces it. The database is updated to keep
this relationship.
Sida Chen 2018-09-05 11:34:49 -04:00 committed by Sida Chen
parent 4b64151330
commit 2827b9342b
17 changed files with 642 additions and 512 deletions
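The shape of the change, sketched below as a minimal Go example against the new database types this commit introduces (database.AncestryWithContent, database.AncestryLayer): features are no longer attached to an ancestry as one flat list but to the individual layer that introduces them. The ancestry name, layer hashes, and feature values are hypothetical.

package main

import (
	"fmt"

	"github.com/coreos/clair/database"
)

func main() {
	// Each AncestryLayer now carries the features introduced by that layer,
	// instead of the ancestry holding a single flat feature list.
	ancestry := database.AncestryWithContent{
		Ancestry: database.Ancestry{
			Name:   "example-ancestry", // hypothetical name
			Layers: []database.Layer{{Hash: "layer-0"}, {Hash: "layer-1"}},
		},
		Layers: []database.AncestryLayer{
			{
				Layer: database.Layer{Hash: "layer-0"},
				DetectedFeatures: []database.NamespacedFeature{
					{
						Namespace: database.Namespace{Name: "debian:8", VersionFormat: "dpkg"},
						Feature:   database.Feature{Name: "openssl", Version: "1.0", VersionFormat: "dpkg"},
					},
				},
			},
			// The second layer introduces no new features.
			{Layer: database.Layer{Hash: "layer-1"}},
		},
	}

	for i, layer := range ancestry.Layers {
		fmt.Printf("layer %d (%s): %d feature(s)\n", i, layer.Hash, len(layer.DetectedFeatures))
	}
}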

View File

@ -297,8 +297,10 @@ func (m *GetAncestryResponse) GetStatus() *ClairStatus {
} }
type GetAncestryResponse_AncestryLayer struct { type GetAncestryResponse_AncestryLayer struct {
Layer *Layer `protobuf:"bytes,1,opt,name=layer" json:"layer,omitempty"` // The layer's information.
DetectedFeatures []*Feature `protobuf:"bytes,2,rep,name=detectedFeatures" json:"detectedFeatures,omitempty"` Layer *Layer `protobuf:"bytes,1,opt,name=layer" json:"layer,omitempty"`
// The features detected in this layer.
DetectedFeatures []*Feature `protobuf:"bytes,2,rep,name=detected_features,json=detectedFeatures" json:"detected_features,omitempty"`
} }
func (m *GetAncestryResponse_AncestryLayer) Reset() { *m = GetAncestryResponse_AncestryLayer{} } func (m *GetAncestryResponse_AncestryLayer) Reset() { *m = GetAncestryResponse_AncestryLayer{} }
@ -373,7 +375,7 @@ type PostAncestryRequest struct {
// The format of the image being uploaded. // The format of the image being uploaded.
Format string `protobuf:"bytes,2,opt,name=format" json:"format,omitempty"` Format string `protobuf:"bytes,2,opt,name=format" json:"format,omitempty"`
// The layers to be scanned for this Ancestry, ordered in the way that i th // The layers to be scanned for this Ancestry, ordered in the way that i th
// layer is the i + 1 th layer's parent. // layer is the parent of i + 1 th layer.
Layers []*PostAncestryRequest_PostLayer `protobuf:"bytes,3,rep,name=layers" json:"layers,omitempty"` Layers []*PostAncestryRequest_PostLayer `protobuf:"bytes,3,rep,name=layers" json:"layers,omitempty"`
} }
@ -1023,85 +1025,85 @@ var _StatusService_serviceDesc = grpc.ServiceDesc{
func init() { proto.RegisterFile("api/v3/clairpb/clair.proto", fileDescriptor0) } func init() { proto.RegisterFile("api/v3/clairpb/clair.proto", fileDescriptor0) }
var fileDescriptor0 = []byte{ var fileDescriptor0 = []byte{
// 1269 bytes of a gzipped FileDescriptorProto // 1268 bytes of a gzipped FileDescriptorProto
	// (old and new gzipped FileDescriptorProto byte values omitted)
} }

View File

@ -99,9 +99,8 @@ message GetAncestryResponse {
// The layer's information. // The layer's information.
Layer layer = 1; Layer layer = 1;
// The features detected in this layer. // The features detected in this layer.
repeated Feature detectedFeatures = 2; repeated Feature detected_features = 2;
} }
message Ancestry { message Ancestry {
// The name of the desired ancestry. // The name of the desired ancestry.
string name = 1; string name = 1;
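For API consumers, a minimal sketch (assuming the generated Go package github.com/coreos/clair/api/v3/clairpb and the field names shown in the generated code above) of how a client can now walk the per-layer detected_features instead of a single flat feature list; the hash and feature values are made up.

package main

import (
	"fmt"

	pb "github.com/coreos/clair/api/v3/clairpb"
)

// printLayerFeatures walks an ancestry response and prints the features that
// each layer introduced, using the new per-layer detected_features field.
func printLayerFeatures(ancestry *pb.GetAncestryResponse_Ancestry) {
	for i, layer := range ancestry.Layers {
		fmt.Printf("layer %d: %s\n", i, layer.Layer.Hash)
		for _, feature := range layer.DetectedFeatures {
			fmt.Printf("  %s %s\n", feature.Name, feature.Version)
		}
	}
}

func main() {
	printLayerFeatures(&pb.GetAncestryResponse_Ancestry{
		Layers: []*pb.GetAncestryResponse_AncestryLayer{
			{
				Layer:            &pb.Layer{Hash: "layer-a"},
				DetectedFeatures: []*pb.Feature{{Name: "openssl", Version: "1.0"}},
			},
		},
	})
}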

View File

@ -206,13 +206,15 @@
"type": "object", "type": "object",
"properties": { "properties": {
"layer": { "layer": {
"$ref": "#/definitions/clairLayer" "$ref": "#/definitions/clairLayer",
"description": "The layer's information."
}, },
"detectedFeatures": { "detected_features": {
"type": "array", "type": "array",
"items": { "items": {
"$ref": "#/definitions/clairFeature" "$ref": "#/definitions/clairFeature"
} },
"description": "The features detected in this layer."
} }
} }
}, },
@ -419,7 +421,7 @@
"items": { "items": {
"$ref": "#/definitions/PostAncestryRequestPostLayer" "$ref": "#/definitions/PostAncestryRequestPostLayer"
}, },
"description": "The layers to be scanned for this Ancestry, ordered in the way that i th\nlayer is the i + 1 th layer's parent." "description": "The layers to be scanned for this Ancestry, ordered in the way that i th\nlayer is the parent of i + 1 th layer."
} }
} }
}, },

View File

@ -124,11 +124,17 @@ func VulnerabilityWithFixedInFromDatabaseModel(dbVuln database.VulnerabilityWith
// AncestryFromDatabaseModel converts database ancestry to api ancestry. // AncestryFromDatabaseModel converts database ancestry to api ancestry.
func AncestryFromDatabaseModel(dbAncestry database.Ancestry) *GetAncestryResponse_Ancestry { func AncestryFromDatabaseModel(dbAncestry database.Ancestry) *GetAncestryResponse_Ancestry {
ancestry := &GetAncestryResponse_Ancestry{Name: dbAncestry.Name} ancestry := &GetAncestryResponse_Ancestry{
Name: dbAncestry.Name,
ScannedDetectors: dbAncestry.ProcessedBy.Detectors,
ScannedListers: dbAncestry.ProcessedBy.Listers,
}
for _, layer := range dbAncestry.Layers { for _, layer := range dbAncestry.Layers {
ancestryLayer := &GetAncestryResponse_AncestryLayer{} ancestry.Layers = append(ancestry.Layers,
ancestryLayer.Layer = LayerFromDatabaseModel(layer) &GetAncestryResponse_AncestryLayer{
ancestry.Layers = append(ancestry.Layers, ancestryLayer) Layer: LayerFromDatabaseModel(layer),
})
} }
return ancestry return ancestry

View File

@ -130,13 +130,18 @@ func (s *AncestryServer) GetAncestry(ctx context.Context, req *pb.GetAncestryReq
return nil, status.Error(codes.NotFound, fmt.Sprintf("requested ancestry '%s' is not found", req.GetAncestryName())) return nil, status.Error(codes.NotFound, fmt.Sprintf("requested ancestry '%s' is not found", req.GetAncestryName()))
} }
respAncestry = &pb.GetAncestryResponse_Ancestry{Name: name} respAncestry = &pb.GetAncestryResponse_Ancestry{
respAncestry.ScannedDetectors = ancestry.ProcessedBy.Detectors Name: name,
respAncestry.ScannedListers = ancestry.ProcessedBy.Listers ScannedDetectors: ancestry.ProcessedBy.Detectors,
respAncestry.Layers = []*pb.GetAncestryResponse_AncestryLayer{} ScannedListers: ancestry.ProcessedBy.Listers,
}
for _, layer := range ancestry.Layers { for _, layer := range ancestry.Layers {
ancestryLayer := &pb.GetAncestryResponse_AncestryLayer{} ancestryLayer := &pb.GetAncestryResponse_AncestryLayer{
Layer: &pb.Layer{
Hash: layer.Hash,
},
}
if req.GetWithVulnerabilities() { if req.GetWithVulnerabilities() {
featureVulnerabilities, err := tx.FindAffectedNamespacedFeatures(layer.DetectedFeatures) featureVulnerabilities, err := tx.FindAffectedNamespacedFeatures(layer.DetectedFeatures)

View File

@ -91,18 +91,18 @@ type Session interface {
// UpsertAncestry inserts or replaces an ancestry and its namespaced // UpsertAncestry inserts or replaces an ancestry and its namespaced
// features and processors used to scan the ancestry. // features and processors used to scan the ancestry.
UpsertAncestry(ancestry Ancestry, features []NamespacedFeature, processedBy Processors) error UpsertAncestry(AncestryWithContent) error
// FindAncestry retrieves an ancestry with processors used to scan the // FindAncestry retrieves an ancestry with processors used to scan the
// ancestry. If the ancestry is not found, return false. // ancestry. If the ancestry is not found, return false.
// //
// The ancestry's processors are returned to short cut processing ancestry // The ancestry's processors are returned to short cut processing ancestry
// if it has been processed by all processors in the current Clair instance. // if it has been processed by all processors in the current Clair instance.
FindAncestry(name string) (ancestry Ancestry, processedBy Processors, found bool, err error) FindAncestry(name string) (ancestry Ancestry, found bool, err error)
// FindAncestryFeatures retrieves an ancestry with all detected namespaced // FindAncestryWithContent retrieves an ancestry with all detected
// features. If the ancestry is not found, return false. // namespaced features. If the ancestry is not found, return false.
FindAncestryFeatures(name string) (ancestry AncestryWithFeatures, found bool, err error) FindAncestryWithContent(name string) (ancestry AncestryWithContent, found bool, err error)
// PersistFeatures inserts a set of features if not in the database. // PersistFeatures inserts a set of features if not in the database.
PersistFeatures(features []Feature) error PersistFeatures(features []Feature) error
@ -125,8 +125,8 @@ type Session interface {
// PersistNamespaces inserts a set of namespaces if not in the database. // PersistNamespaces inserts a set of namespaces if not in the database.
PersistNamespaces([]Namespace) error PersistNamespaces([]Namespace) error
// PersistLayer inserts a layer if not in the datastore. // PersistLayer creates a layer using the blob Sum hash.
PersistLayer(Layer) error PersistLayer(hash string) error
// PersistLayerContent persists a layer's content in the database. The given // PersistLayerContent persists a layer's content in the database. The given
// namespaces and features can be partial content of this layer. // namespaces and features can be partial content of this layer.
@ -135,8 +135,8 @@ type Session interface {
// in the database. // in the database.
PersistLayerContent(hash string, namespaces []Namespace, features []Feature, processedBy Processors) error PersistLayerContent(hash string, namespaces []Namespace, features []Feature, processedBy Processors) error
// FindLayer retrieves a layer and the processors scanned the layer. // FindLayer retrieves the metadata of a layer.
FindLayer(hash string) (layer Layer, processedBy Processors, found bool, err error) FindLayer(hash string) (layer Layer, found bool, err error)
// FindLayerWithContent returns a layer with all detected features and // FindLayerWithContent returns a layer with all detected features and
// namespaces. // namespaces.
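A minimal sketch of how a caller might drive the reworked Session interface (transaction setup, rollback, and most error handling omitted); the helper name and the exact flow are illustrative, not taken from the worker code in this commit.

package example

import (
	"log"

	"github.com/coreos/clair/database"
)

// persistAndQuery illustrates the new signatures: layers are persisted by
// blob-sum hash alone, and the ancestry with its per-layer features is
// upserted and read back as one AncestryWithContent value.
func persistAndQuery(tx database.Session, ancestry database.AncestryWithContent) error {
	for _, layer := range ancestry.Ancestry.Layers {
		if err := tx.PersistLayer(layer.Hash); err != nil {
			return err
		}
	}

	if err := tx.UpsertAncestry(ancestry); err != nil {
		return err
	}

	stored, found, err := tx.FindAncestryWithContent(ancestry.Name)
	if err != nil {
		return err
	}
	if found {
		for i, layer := range stored.Layers {
			log.Printf("layer %d (%s): %d feature(s)", i, layer.Hash, len(layer.DetectedFeatures))
		}
	}
	return tx.Commit()
}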

View File

@ -21,17 +21,17 @@ import "time"
type MockSession struct { type MockSession struct {
FctCommit func() error FctCommit func() error
FctRollback func() error FctRollback func() error
FctUpsertAncestry func(Ancestry, []NamespacedFeature, Processors) error FctUpsertAncestry func(AncestryWithContent) error
FctFindAncestry func(name string) (Ancestry, Processors, bool, error) FctFindAncestry func(name string) (Ancestry, bool, error)
FctFindAncestryFeatures func(name string) (AncestryWithFeatures, bool, error) FctFindAncestryWithContent func(name string) (AncestryWithContent, bool, error)
FctFindAffectedNamespacedFeatures func(features []NamespacedFeature) ([]NullableAffectedNamespacedFeature, error) FctFindAffectedNamespacedFeatures func(features []NamespacedFeature) ([]NullableAffectedNamespacedFeature, error)
FctPersistNamespaces func([]Namespace) error FctPersistNamespaces func([]Namespace) error
FctPersistFeatures func([]Feature) error FctPersistFeatures func([]Feature) error
FctPersistNamespacedFeatures func([]NamespacedFeature) error FctPersistNamespacedFeatures func([]NamespacedFeature) error
FctCacheAffectedNamespacedFeatures func([]NamespacedFeature) error FctCacheAffectedNamespacedFeatures func([]NamespacedFeature) error
FctPersistLayer func(Layer) error FctPersistLayer func(hash string) error
FctPersistLayerContent func(hash string, namespaces []Namespace, features []Feature, processedBy Processors) error FctPersistLayerContent func(hash string, namespaces []Namespace, features []Feature, processedBy Processors) error
FctFindLayer func(name string) (Layer, Processors, bool, error) FctFindLayer func(name string) (Layer, bool, error)
FctFindLayerWithContent func(name string) (LayerWithContent, bool, error) FctFindLayerWithContent func(name string) (LayerWithContent, bool, error)
FctInsertVulnerabilities func([]VulnerabilityWithAffected) error FctInsertVulnerabilities func([]VulnerabilityWithAffected) error
FctFindVulnerabilities func([]VulnerabilityID) ([]NullableVulnerability, error) FctFindVulnerabilities func([]VulnerabilityID) ([]NullableVulnerability, error)
@ -63,23 +63,23 @@ func (ms *MockSession) Rollback() error {
panic("required mock function not implemented") panic("required mock function not implemented")
} }
func (ms *MockSession) UpsertAncestry(ancestry Ancestry, features []NamespacedFeature, processedBy Processors) error { func (ms *MockSession) UpsertAncestry(ancestry AncestryWithContent) error {
if ms.FctUpsertAncestry != nil { if ms.FctUpsertAncestry != nil {
return ms.FctUpsertAncestry(ancestry, features, processedBy) return ms.FctUpsertAncestry(ancestry)
} }
panic("required mock function not implemented") panic("required mock function not implemented")
} }
func (ms *MockSession) FindAncestry(name string) (Ancestry, Processors, bool, error) { func (ms *MockSession) FindAncestry(name string) (Ancestry, bool, error) {
if ms.FctFindAncestry != nil { if ms.FctFindAncestry != nil {
return ms.FctFindAncestry(name) return ms.FctFindAncestry(name)
} }
panic("required mock function not implemented") panic("required mock function not implemented")
} }
func (ms *MockSession) FindAncestryFeatures(name string) (AncestryWithFeatures, bool, error) { func (ms *MockSession) FindAncestryWithContent(name string) (AncestryWithContent, bool, error) {
if ms.FctFindAncestryFeatures != nil { if ms.FctFindAncestryWithContent != nil {
return ms.FctFindAncestryFeatures(name) return ms.FctFindAncestryWithContent(name)
} }
panic("required mock function not implemented") panic("required mock function not implemented")
} }
@ -119,7 +119,7 @@ func (ms *MockSession) CacheAffectedNamespacedFeatures(namespacedFeatures []Name
panic("required mock function not implemented") panic("required mock function not implemented")
} }
func (ms *MockSession) PersistLayer(layer Layer) error { func (ms *MockSession) PersistLayer(layer string) error {
if ms.FctPersistLayer != nil { if ms.FctPersistLayer != nil {
return ms.FctPersistLayer(layer) return ms.FctPersistLayer(layer)
} }
@ -133,7 +133,7 @@ func (ms *MockSession) PersistLayerContent(hash string, namespaces []Namespace,
panic("required mock function not implemented") panic("required mock function not implemented")
} }
func (ms *MockSession) FindLayer(name string) (Layer, Processors, bool, error) { func (ms *MockSession) FindLayer(name string) (Layer, bool, error) {
if ms.FctFindLayer != nil { if ms.FctFindLayer != nil {
return ms.FctFindLayer(name) return ms.FctFindLayer(name)
} }
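A short, hypothetical test helper showing how the updated mock hooks are wired now that FctUpsertAncestry takes a single AncestryWithContent and FctPersistLayer takes a hash; the helper name is invented for illustration.

package example

import "github.com/coreos/clair/database"

// newUpsertRecorder returns a mock session that records every upserted
// ancestry, matching the new hook signatures shown above.
func newUpsertRecorder(recorded *[]database.AncestryWithContent) *database.MockSession {
	return &database.MockSession{
		FctUpsertAncestry: func(ancestry database.AncestryWithContent) error {
			*recorded = append(*recorded, ancestry)
			return nil
		},
		FctPersistLayer: func(hash string) error { return nil },
	}
}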

View File

@ -20,7 +20,7 @@ import (
"time" "time"
) )
// Processors are extensions to scan layer's content. // Processors are extensions to scan a layer's content.
type Processors struct { type Processors struct {
Listers []string Listers []string
Detectors []string Detectors []string
@ -29,24 +29,39 @@ type Processors struct {
// Ancestry is a manifest that keeps all layers in an image in order. // Ancestry is a manifest that keeps all layers in an image in order.
type Ancestry struct { type Ancestry struct {
Name string Name string
// ProcessedBy contains the processors that are used when computing the
// content of this ancestry.
ProcessedBy Processors
// Layers should be ordered and i_th layer is the parent of i+1_th layer in // Layers should be ordered and i_th layer is the parent of i+1_th layer in
// the slice. // the slice.
Layers []Layer Layers []Layer
} }
// AncestryWithFeatures is an ancestry with namespaced features detected in the // AncestryWithContent has the ancestry's name and the Ancestry Layers
// ancestry, which is processed by `ProcessedBy`. // associated with it.
type AncestryWithFeatures struct { type AncestryWithContent struct {
Ancestry Ancestry
ProcessedBy Processors // TODO(sidchen) deduplicate the Layers here and the Layers in
Features []NamespacedFeature // Ancestry.Layers.
// AncestryLayers should have the same order as Ancestry.Layers.
Layers []AncestryLayer
} }
// Layer corresponds to a layer in an image processed by `ProcessedBy`. // AncestryLayer is a layer with all detected namespaced features.
type AncestryLayer struct {
Layer
// DetectedFeatures are the features introduced by this layer.
DetectedFeatures []NamespacedFeature
}
// Layer contains the metadata of a layer.
type Layer struct { type Layer struct {
// Hash is content hash of the layer. // Hash is content hash of the layer.
Hash string Hash string
// ProcessedBy contains the processors that processed this layer.
ProcessedBy Processors
} }
// LayerWithContent is a layer with its detected namespaces and features by // LayerWithContent is a layer with its detected namespaces and features by
@ -54,9 +69,8 @@ type Layer struct {
type LayerWithContent struct { type LayerWithContent struct {
Layer Layer
ProcessedBy Processors Namespaces []Namespace
Namespaces []Namespace Features []Feature
Features []Feature
} }
// Namespace is the contextual information around features. // Namespace is the contextual information around features.
@ -198,6 +212,7 @@ type VulnerabilityNotificationWithVulnerable struct {
// PageNumber is used to do pagination. // PageNumber is used to do pagination.
type PageNumber string type PageNumber string
// MetadataMap is for storing the metadata returned by vulnerability database.
type MetadataMap map[string]interface{} type MetadataMap map[string]interface{}
// NullableAffectedNamespacedFeature is an affectednamespacedfeature with // NullableAffectedNamespacedFeature is an affectednamespacedfeature with
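The ordering contract stated in the comments above (AncestryWithContent.Layers mirrors Ancestry.Layers index by index) is what the layer-wise read path relies on; a small hypothetical checker, not part of this commit, makes the invariant concrete.

package example

import (
	"fmt"

	"github.com/coreos/clair/database"
)

// validateLayerOrder checks the documented invariant: the i-th AncestryLayer
// must describe the i-th entry of Ancestry.Layers.
func validateLayerOrder(a database.AncestryWithContent) error {
	if len(a.Layers) != len(a.Ancestry.Layers) {
		return fmt.Errorf("ancestry %q: %d ancestry layers vs %d layers", a.Name, len(a.Layers), len(a.Ancestry.Layers))
	}
	for i, layer := range a.Layers {
		if layer.Hash != a.Ancestry.Layers[i].Hash {
			return fmt.Errorf("ancestry %q: layer %d hash mismatch", a.Name, i)
		}
	}
	return nil
}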

View File

@ -3,48 +3,37 @@ package pgsql
import ( import (
"database/sql" "database/sql"
"errors" "errors"
"fmt"
"strings"
"github.com/lib/pq"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/coreos/clair/database" "github.com/coreos/clair/database"
"github.com/coreos/clair/pkg/commonerr" "github.com/coreos/clair/pkg/commonerr"
) )
func (tx *pgSession) UpsertAncestry(ancestry database.Ancestry, features []database.NamespacedFeature, processedBy database.Processors) error { func (tx *pgSession) UpsertAncestry(ancestry database.AncestryWithContent) error {
if ancestry.Name == "" { if ancestry.Name == "" {
log.Warning("Empty ancestry name is not allowed") log.Error("Empty ancestry name is not allowed")
return commonerr.NewBadRequestError("could not insert an ancestry with empty name") return commonerr.NewBadRequestError("could not insert an ancestry with empty name")
} }
if len(ancestry.Layers) == 0 { if len(ancestry.Layers) == 0 {
log.Warning("Empty ancestry is not allowed") log.Error("Empty ancestry is not allowed")
return commonerr.NewBadRequestError("could not insert an ancestry with 0 layers") return commonerr.NewBadRequestError("could not insert an ancestry with 0 layers")
} }
err := tx.deleteAncestry(ancestry.Name) if err := tx.deleteAncestry(ancestry.Name); err != nil {
if err != nil {
return err return err
} }
var ancestryID int64 var ancestryID int64
err = tx.QueryRow(insertAncestry, ancestry.Name).Scan(&ancestryID) if err := tx.QueryRow(insertAncestry, ancestry.Name).Scan(&ancestryID); err != nil {
if err != nil {
if isErrUniqueViolation(err) { if isErrUniqueViolation(err) {
return handleError("insertAncestry", errors.New("Other Go-routine is processing this ancestry (skip).")) return handleError("insertAncestry", errors.New("other Go-routine is processing this ancestry (skip)"))
} }
return handleError("insertAncestry", err) return handleError("insertAncestry", err)
} }
err = tx.insertAncestryLayers(ancestryID, ancestry.Layers) if err := tx.insertAncestryLayers(ancestryID, ancestry.Layers); err != nil {
if err != nil {
return err
}
err = tx.insertAncestryFeatures(ancestryID, features)
if err != nil {
return err return err
} }
@ -52,71 +41,82 @@ func (tx *pgSession) UpsertAncestry(ancestry database.Ancestry, features []datab
"persistAncestryLister", "persistAncestryLister",
persistAncestryDetector, persistAncestryDetector,
"persistAncestryDetector", "persistAncestryDetector",
ancestryID, processedBy) ancestryID, ancestry.ProcessedBy)
} }
func (tx *pgSession) FindAncestry(name string) (database.Ancestry, database.Processors, bool, error) { func (tx *pgSession) FindAncestry(name string) (database.Ancestry, bool, error) {
ancestry := database.Ancestry{Name: name}
processed := database.Processors{}
var ancestryID int64
err := tx.QueryRow(searchAncestry, name).Scan(&ancestryID)
if err != nil {
if err == sql.ErrNoRows {
return ancestry, processed, false, nil
}
return ancestry, processed, false, handleError("searchAncestry", err)
}
ancestry.Layers, err = tx.findAncestryLayers(ancestryID)
if err != nil {
return ancestry, processed, false, err
}
processed.Detectors, err = tx.findProcessors(searchAncestryDetectors, "searchAncestryDetectors", "detector", ancestryID)
if err != nil {
return ancestry, processed, false, err
}
processed.Listers, err = tx.findProcessors(searchAncestryListers, "searchAncestryListers", "lister", ancestryID)
if err != nil {
return ancestry, processed, false, err
}
return ancestry, processed, true, nil
}
func (tx *pgSession) FindAncestryFeatures(name string) (database.AncestryWithFeatures, bool, error) {
var ( var (
awf database.AncestryWithFeatures ancestryID int64
ok bool ancestry = database.Ancestry{Name: name}
err error err error
) )
awf.Ancestry, awf.ProcessedBy, ok, err = tx.FindAncestry(name)
if err != nil { if err = tx.QueryRow(searchAncestry, name).Scan(&ancestryID); err != nil {
return awf, false, err if err == sql.ErrNoRows {
return ancestry, false, nil
}
return ancestry, false, handleError("searchAncestry", err)
} }
if !ok { if ancestry.Layers, err = tx.findAncestryLayers(ancestryID); err != nil {
return awf, false, nil return ancestry, false, err
}
if ancestry.ProcessedBy.Detectors, err = tx.findProcessors(searchAncestryDetectors, "searchAncestryDetectors", "detector", ancestryID); err != nil {
return ancestry, false, err
}
if ancestry.ProcessedBy.Listers, err = tx.findProcessors(searchAncestryListers, "searchAncestryListers", "lister", ancestryID); err != nil {
return ancestry, false, err
}
return ancestry, true, nil
}
func (tx *pgSession) FindAncestryWithContent(name string) (database.AncestryWithContent, bool, error) {
var (
ancestryContent database.AncestryWithContent
isValid bool
err error
)
if ancestryContent.Ancestry, isValid, err = tx.FindAncestry(name); err != nil || !isValid {
return ancestryContent, isValid, err
} }
rows, err := tx.Query(searchAncestryFeatures, name) rows, err := tx.Query(searchAncestryFeatures, name)
if err != nil { if err != nil {
return awf, false, handleError("searchAncestryFeatures", err) return ancestryContent, false, handleError("searchAncestryFeatures", err)
} }
features := map[int][]database.NamespacedFeature{}
for rows.Next() { for rows.Next() {
nf := database.NamespacedFeature{} var (
err := rows.Scan(&nf.Namespace.Name, &nf.Namespace.VersionFormat, &nf.Feature.Name, &nf.Feature.Version) feature database.NamespacedFeature
if err != nil { // layerIndex is used to determine which layer the namespaced feature belongs to.
return awf, false, handleError("searchAncestryFeatures", err) layerIndex sql.NullInt64
)
if err := rows.Scan(&feature.Namespace.Name,
&feature.Namespace.VersionFormat,
&feature.Feature.Name, &feature.Feature.Version,
&layerIndex); err != nil {
return ancestryContent, false, handleError("searchAncestryFeatures", err)
} }
nf.Feature.VersionFormat = nf.Namespace.VersionFormat
awf.Features = append(awf.Features, nf) feature.Feature.VersionFormat = feature.Namespace.VersionFormat // This looks strange.
features[int(layerIndex.Int64)] = append(features[int(layerIndex.Int64)], feature)
} }
return awf, true, nil // By the assumption of Ancestry Layer Index, we have the ancestry's layer
// index corresponding to the index in the array.
for index, layer := range ancestryContent.Ancestry.Layers {
ancestryLayer := database.AncestryLayer{Layer: layer}
ancestryLayer.DetectedFeatures, _ = features[index]
ancestryContent.Layers = append(ancestryContent.Layers, ancestryLayer)
}
return ancestryContent, true, nil
} }
func (tx *pgSession) deleteAncestry(name string) error { func (tx *pgSession) deleteAncestry(name string) error {
@ -164,97 +164,62 @@ func (tx *pgSession) findAncestryLayers(ancestryID int64) ([]database.Layer, err
if err != nil { if err != nil {
return nil, handleError("searchAncestryLayer", err) return nil, handleError("searchAncestryLayer", err)
} }
layers := []database.Layer{} layers := []database.Layer{}
for rows.Next() { for rows.Next() {
var layer database.Layer var layer database.Layer
err := rows.Scan(&layer.Hash) if err := rows.Scan(&layer.Hash); err != nil {
if err != nil {
return nil, handleError("searchAncestryLayer", err) return nil, handleError("searchAncestryLayer", err)
} }
layers = append(layers, layer) layers = append(layers, layer)
} }
return layers, nil return layers, nil
} }
func (tx *pgSession) insertAncestryLayers(ancestryID int64, layers []database.Layer) error { // insertAncestryLayers inserts the ancestry layers along with its content into
layerIDs := map[string]sql.NullInt64{} // the database. The layers are 0 based indexed in the original order.
for _, l := range layers { func (tx *pgSession) insertAncestryLayers(ancestryID int64, layers []database.AncestryLayer) error {
layerIDs[l.Hash] = sql.NullInt64{}
}
layerHashes := []string{}
for hash := range layerIDs {
layerHashes = append(layerHashes, hash)
}
rows, err := tx.Query(searchLayerIDs, pq.Array(layerHashes))
if err != nil {
return handleError("searchLayerIDs", err)
}
for rows.Next() {
var (
layerID sql.NullInt64
layerName string
)
err := rows.Scan(&layerID, &layerName)
if err != nil {
return handleError("searchLayerIDs", err)
}
layerIDs[layerName] = layerID
}
notFound := []string{}
for hash, id := range layerIDs {
if !id.Valid {
notFound = append(notFound, hash)
}
}
if len(notFound) > 0 {
return handleError("searchLayerIDs", fmt.Errorf("Layer %s is not found in database", strings.Join(notFound, ",")))
}
//TODO(Sida): use bulk insert. //TODO(Sida): use bulk insert.
stmt, err := tx.Prepare(insertAncestryLayer) stmt, err := tx.Prepare(insertAncestryLayer)
if err != nil { if err != nil {
return handleError("insertAncestryLayer", err) return handleError("insertAncestryLayer", err)
} }
defer stmt.Close() ancestryLayerIDs := []sql.NullInt64{}
for index, layer := range layers { for index, layer := range layers {
_, err := stmt.Exec(ancestryID, index, layerIDs[layer.Hash].Int64) var ancestryLayerID sql.NullInt64
if err != nil { if err := stmt.QueryRow(ancestryID, index, layer.Hash).Scan(&ancestryLayerID); err != nil {
return handleError("insertAncestryLayer", commonerr.CombineErrors(err, stmt.Close())) return handleError("insertAncestryLayer", commonerr.CombineErrors(err, stmt.Close()))
} }
}
ancestryLayerIDs = append(ancestryLayerIDs, ancestryLayerID)
return nil }
}
if err := stmt.Close(); err != nil {
func (tx *pgSession) insertAncestryFeatures(ancestryID int64, features []database.NamespacedFeature) error { return handleError("Failed to close insertAncestryLayer statement", err)
featureIDs, err := tx.findNamespacedFeatureIDs(features) }
if err != nil {
return err stmt, err = tx.Prepare(insertAncestryLayerFeature)
} defer stmt.Close()
//TODO(Sida): use bulk insert. for i, layer := range layers {
stmtFeatures, err := tx.Prepare(insertAncestryFeature) var (
if err != nil { nsFeatureIDs []sql.NullInt64
return handleError("insertAncestryFeature", err) layerID = ancestryLayerIDs[i]
} )
defer stmtFeatures.Close() if nsFeatureIDs, err = tx.findNamespacedFeatureIDs(layer.DetectedFeatures); err != nil {
return err
for _, id := range featureIDs { }
if !id.Valid {
return errors.New("requested namespaced feature is not in database") for _, id := range nsFeatureIDs {
} if _, err := stmt.Exec(layerID, id); err != nil {
return handleError("insertAncestryLayerFeature", commonerr.CombineErrors(err, stmt.Close()))
_, err := stmtFeatures.Exec(ancestryID, id) }
if err != nil { }
return handleError("insertAncestryFeature", err)
}
} }
return nil return nil
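To make the new read path above easier to follow, here is a hypothetical standalone version of the regrouping step in FindAncestryWithContent: feature rows scanned together with their ancestry_index are folded back under the layer at that index.

package example

import "github.com/coreos/clair/database"

// groupByLayer mirrors the regrouping done in FindAncestryWithContent: each
// feature row carries the index of the layer that introduced it, and the
// result keeps the original layer order.
func groupByLayer(layers []database.Layer, featuresByIndex map[int][]database.NamespacedFeature) []database.AncestryLayer {
	grouped := make([]database.AncestryLayer, 0, len(layers))
	for index, layer := range layers {
		grouped = append(grouped, database.AncestryLayer{
			Layer:            layer,
			DetectedFeatures: featuresByIndex[index], // nil when the layer introduced nothing
		})
	}
	return grouped
}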

View File

@ -26,26 +26,53 @@ import (
func TestUpsertAncestry(t *testing.T) { func TestUpsertAncestry(t *testing.T) {
store, tx := openSessionForTest(t, "UpsertAncestry", true) store, tx := openSessionForTest(t, "UpsertAncestry", true)
defer closeTest(t, store, tx) defer closeTest(t, store, tx)
a1 := database.Ancestry{ a1 := database.AncestryWithContent{
Name: "a1", Ancestry: database.Ancestry{
Layers: []database.Layer{ Name: "a1",
{Hash: "layer-N"}, Layers: []database.Layer{
{Hash: "layer-N"},
},
},
Layers: []database.AncestryLayer{
{
Layer: database.Layer{
Hash: "layer-N",
},
},
}, },
} }
a2 := database.Ancestry{} a2 := database.AncestryWithContent{}
a3 := database.Ancestry{ a3 := database.AncestryWithContent{
Name: "a", Ancestry: database.Ancestry{
Layers: []database.Layer{ Name: "a",
{Hash: "layer-0"}, Layers: []database.Layer{
{Hash: "layer-0"},
},
},
Layers: []database.AncestryLayer{
{
Layer: database.Layer{
Hash: "layer-0",
},
},
}, },
} }
a4 := database.Ancestry{ a4 := database.AncestryWithContent{
Name: "a", Ancestry: database.Ancestry{
Layers: []database.Layer{ Name: "a",
{Hash: "layer-1"}, Layers: []database.Layer{
{Hash: "layer-1"},
},
},
Layers: []database.AncestryLayer{
{
Layer: database.Layer{
Hash: "layer-1",
},
},
}, },
} }
@ -83,17 +110,20 @@ func TestUpsertAncestry(t *testing.T) {
Feature: f2, Feature: f2,
} }
a4.ProcessedBy = p
// invalid case // invalid case
assert.NotNil(t, tx.UpsertAncestry(a1, nil, database.Processors{})) assert.NotNil(t, tx.UpsertAncestry(a1))
assert.NotNil(t, tx.UpsertAncestry(a2, nil, database.Processors{})) assert.NotNil(t, tx.UpsertAncestry(a2))
// valid case // valid case
assert.Nil(t, tx.UpsertAncestry(a3, nil, database.Processors{})) assert.Nil(t, tx.UpsertAncestry(a3))
a4.Layers[0].DetectedFeatures = []database.NamespacedFeature{nsf1, nsf2}
// replace invalid case // replace invalid case
assert.NotNil(t, tx.UpsertAncestry(a4, []database.NamespacedFeature{nsf1, nsf2}, p)) assert.NotNil(t, tx.UpsertAncestry(a4))
a4.Layers[0].DetectedFeatures = []database.NamespacedFeature{nsf1}
// replace valid case // replace valid case
assert.Nil(t, tx.UpsertAncestry(a4, []database.NamespacedFeature{nsf1}, p)) assert.Nil(t, tx.UpsertAncestry(a4))
// validate // validate
ancestry, ok, err := tx.FindAncestryFeatures("a") ancestry, ok, err := tx.FindAncestryWithContent("a")
assert.Nil(t, err) assert.Nil(t, err)
assert.True(t, ok) assert.True(t, ok)
assert.Equal(t, a4, ancestry.Ancestry) assert.Equal(t, a4, ancestry.Ancestry)
@ -111,8 +141,7 @@ func TestFindAncestry(t *testing.T) {
store, tx := openSessionForTest(t, "FindAncestry", true) store, tx := openSessionForTest(t, "FindAncestry", true)
defer closeTest(t, store, tx) defer closeTest(t, store, tx)
// not found _, ok, err := tx.FindAncestry("ancestry-non")
_, _, ok, err := tx.FindAncestry("ancestry-non")
assert.Nil(t, err) assert.Nil(t, err)
assert.False(t, ok) assert.False(t, ok)
@ -124,41 +153,52 @@ func TestFindAncestry(t *testing.T) {
{Hash: "layer-2"}, {Hash: "layer-2"},
{Hash: "layer-3a"}, {Hash: "layer-3a"},
}, },
ProcessedBy: database.Processors{
Detectors: []string{"os-release"},
Listers: []string{"dpkg"},
},
} }
expectedProcessors := database.Processors{ a, ok2, err := tx.FindAncestry("ancestry-1")
Detectors: []string{"os-release"},
Listers: []string{"dpkg"},
}
// found
a, p, ok2, err := tx.FindAncestry("ancestry-1")
if assert.Nil(t, err) && assert.True(t, ok2) { if assert.Nil(t, err) && assert.True(t, ok2) {
assertAncestryEqual(t, expected, a) assertAncestryEqual(t, expected, a)
assertProcessorsEqual(t, expectedProcessors, p)
} }
} }
func assertAncestryWithFeatureEqual(t *testing.T, expected database.AncestryWithFeatures, actual database.AncestryWithFeatures) bool { func assertAncestryWithFeatureEqual(t *testing.T, expected database.AncestryWithContent, actual database.AncestryWithContent) bool {
return assertAncestryEqual(t, expected.Ancestry, actual.Ancestry) && if assertAncestryEqual(t, expected.Ancestry, actual.Ancestry) && assert.Equal(t, len(expected.Layers), len(actual.Layers)) {
assertNamespacedFeatureEqual(t, expected.Features, actual.Features) && for index, layer := range expected.Layers {
assertProcessorsEqual(t, expected.ProcessedBy, actual.ProcessedBy) if !assertAncestryLayerEqual(t, layer, actual.Layers[index]) {
} return false
func assertAncestryEqual(t *testing.T, expected database.Ancestry, actual database.Ancestry) bool { }
return assert.Equal(t, expected.Name, actual.Name) && assert.Equal(t, expected.Layers, actual.Layers) }
return true
}
return false
} }
func TestFindAncestryFeatures(t *testing.T) { func assertAncestryLayerEqual(t *testing.T, expected database.AncestryLayer, actual database.AncestryLayer) bool {
store, tx := openSessionForTest(t, "FindAncestryFeatures", true) return assertLayerEqual(t, expected.Layer, actual.Layer) &&
assertNamespacedFeatureEqual(t, expected.DetectedFeatures, actual.DetectedFeatures)
}
func assertAncestryEqual(t *testing.T, expected database.Ancestry, actual database.Ancestry) bool {
return assert.Equal(t, expected.Name, actual.Name) &&
assert.Equal(t, expected.Layers, actual.Layers) &&
assertProcessorsEqual(t, expected.ProcessedBy, actual.ProcessedBy)
}
func TestFindAncestryWithContent(t *testing.T) {
store, tx := openSessionForTest(t, "FindAncestryWithContent", true)
defer closeTest(t, store, tx) defer closeTest(t, store, tx)
// invalid // invalid
_, ok, err := tx.FindAncestryFeatures("ancestry-non") _, ok, err := tx.FindAncestryWithContent("ancestry-non")
if assert.Nil(t, err) { if assert.Nil(t, err) {
assert.False(t, ok) assert.False(t, ok)
} }
expected := database.AncestryWithFeatures{ expected := database.AncestryWithContent{
Ancestry: database.Ancestry{ Ancestry: database.Ancestry{
Name: "ancestry-2", Name: "ancestry-2",
Layers: []database.Layer{ Layers: []database.Layer{
@ -167,41 +207,62 @@ func TestFindAncestryFeatures(t *testing.T) {
{Hash: "layer-2"}, {Hash: "layer-2"},
{Hash: "layer-3b"}, {Hash: "layer-3b"},
}, },
ProcessedBy: database.Processors{
Detectors: []string{"os-release"},
Listers: []string{"dpkg"},
},
}, },
ProcessedBy: database.Processors{
Detectors: []string{"os-release"}, Layers: []database.AncestryLayer{
Listers: []string{"dpkg"},
},
Features: []database.NamespacedFeature{
{ {
Namespace: database.Namespace{ Layer: database.Layer{
Name: "debian:7", Hash: "layer-0",
VersionFormat: "dpkg",
}, },
Feature: database.Feature{ DetectedFeatures: []database.NamespacedFeature{
Name: "wechat", {
Version: "0.5", Namespace: database.Namespace{
VersionFormat: "dpkg", Name: "debian:7",
VersionFormat: "dpkg",
},
Feature: database.Feature{
Name: "wechat",
Version: "0.5",
VersionFormat: "dpkg",
},
},
{
Namespace: database.Namespace{
Name: "debian:8",
VersionFormat: "dpkg",
},
Feature: database.Feature{
Name: "openssl",
Version: "1.0",
VersionFormat: "dpkg",
},
},
}, },
}, },
{ {
Namespace: database.Namespace{ Layer: database.Layer{
Name: "debian:8", Hash: "layer-1",
VersionFormat: "dpkg",
}, },
Feature: database.Feature{ },
Name: "openssl", {
Version: "1.0", Layer: database.Layer{
VersionFormat: "dpkg", Hash: "layer-2",
},
},
{
Layer: database.Layer{
Hash: "layer-3b",
}, },
}, },
}, },
} }
// valid // valid
ancestry, ok, err := tx.FindAncestryFeatures("ancestry-2") ancestry, ok, err := tx.FindAncestryWithContent("ancestry-2")
if assert.Nil(t, err) && assert.True(t, ok) { if assert.Nil(t, err) && assert.True(t, ok) {
assertAncestryEqual(t, expected.Ancestry, ancestry.Ancestry) assertAncestryWithFeatureEqual(t, expected, ancestry)
assertNamespacedFeatureEqual(t, expected.Features, ancestry.Features)
assertProcessorsEqual(t, expected.ProcessedBy, ancestry.ProcessedBy)
} }
} }

View File

@ -22,9 +22,9 @@ import (
"github.com/coreos/clair/pkg/commonerr" "github.com/coreos/clair/pkg/commonerr"
) )
func (tx *pgSession) FindLayer(hash string) (database.Layer, database.Processors, bool, error) { func (tx *pgSession) FindLayer(hash string) (database.Layer, bool, error) {
l, p, _, ok, err := tx.findLayer(hash) layer, _, ok, err := tx.findLayer(hash)
return l, p, ok, err return layer, ok, err
} }
func (tx *pgSession) FindLayerWithContent(hash string) (database.LayerWithContent, bool, error) { func (tx *pgSession) FindLayerWithContent(hash string) (database.LayerWithContent, bool, error) {
@ -35,7 +35,7 @@ func (tx *pgSession) FindLayerWithContent(hash string) (database.LayerWithConten
err error err error
) )
layer.Layer, layer.ProcessedBy, layerID, ok, err = tx.findLayer(hash) layer.Layer, layerID, ok, err = tx.findLayer(hash)
if err != nil { if err != nil {
return layer, false, err return layer, false, err
} }
@ -49,12 +49,12 @@ func (tx *pgSession) FindLayerWithContent(hash string) (database.LayerWithConten
return layer, true, nil return layer, true, nil
} }
func (tx *pgSession) PersistLayer(layer database.Layer) error { func (tx *pgSession) PersistLayer(hash string) error {
if layer.Hash == "" { if hash == "" {
return commonerr.NewBadRequestError("Empty Layer Hash is not allowed") return commonerr.NewBadRequestError("Empty Layer Hash is not allowed")
} }
_, err := tx.Exec(queryPersistLayer(1), layer.Hash) _, err := tx.Exec(queryPersistLayer(1), hash)
if err != nil { if err != nil {
return handleError("queryPersistLayer", err) return handleError("queryPersistLayer", err)
} }
@ -275,34 +275,33 @@ func (tx *pgSession) findLayerFeatures(layerID int64) ([]database.Feature, error
return features, nil return features, nil
} }
func (tx *pgSession) findLayer(hash string) (database.Layer, database.Processors, int64, bool, error) { func (tx *pgSession) findLayer(hash string) (database.Layer, int64, bool, error) {
var ( var (
layerID int64 layerID int64
layer = database.Layer{Hash: hash} layer = database.Layer{Hash: hash, ProcessedBy: database.Processors{}}
processors database.Processors
) )
if hash == "" { if hash == "" {
return layer, processors, layerID, false, commonerr.NewBadRequestError("Empty Layer Hash is not allowed") return layer, layerID, false, commonerr.NewBadRequestError("Empty Layer Hash is not allowed")
} }
err := tx.QueryRow(searchLayer, hash).Scan(&layerID) err := tx.QueryRow(searchLayer, hash).Scan(&layerID)
if err != nil { if err != nil {
if err == sql.ErrNoRows { if err == sql.ErrNoRows {
return layer, processors, layerID, false, nil return layer, layerID, false, nil
} }
return layer, processors, layerID, false, err return layer, layerID, false, err
} }
processors.Detectors, err = tx.findProcessors(searchLayerDetectors, "searchLayerDetectors", "detector", layerID) layer.ProcessedBy.Detectors, err = tx.findProcessors(searchLayerDetectors, "searchLayerDetectors", "detector", layerID)
if err != nil { if err != nil {
return layer, processors, layerID, false, err return layer, layerID, false, err
} }
processors.Listers, err = tx.findProcessors(searchLayerListers, "searchLayerListers", "lister", layerID) layer.ProcessedBy.Listers, err = tx.findProcessors(searchLayerListers, "searchLayerListers", "lister", layerID)
if err != nil { if err != nil {
return layer, processors, layerID, false, err return layer, layerID, false, err
} }
return layer, processors, layerID, true, nil return layer, layerID, true, nil
} }
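A brief, illustrative caller of the reworked FindLayer (not taken from this commit): the processors no longer come back as a separate return value but ride along in Layer.ProcessedBy.

package example

import (
	"fmt"

	"github.com/coreos/clair/database"
)

// describeLayer prints which detectors and listers have already processed a
// layer, using the new (layer, found, error) FindLayer signature.
func describeLayer(tx database.Session, hash string) error {
	layer, found, err := tx.FindLayer(hash)
	if err != nil {
		return err
	}
	if !found {
		fmt.Printf("layer %s is not in the database\n", hash)
		return nil
	}
	fmt.Printf("layer %s: detectors=%v listers=%v\n",
		layer.Hash, layer.ProcessedBy.Detectors, layer.ProcessedBy.Listers)
	return nil
}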

View File

@ -26,8 +26,8 @@ func TestPersistLayer(t *testing.T) {
datastore, tx := openSessionForTest(t, "PersistLayer", false) datastore, tx := openSessionForTest(t, "PersistLayer", false)
defer closeTest(t, datastore, tx) defer closeTest(t, datastore, tx)
l1 := database.Layer{} l1 := ""
l2 := database.Layer{Hash: "HESOYAM"} l2 := "HESOYAM"
// invalid // invalid
assert.NotNil(t, tx.PersistLayer(l1)) assert.NotNil(t, tx.PersistLayer(l1))
@ -51,24 +51,25 @@ func TestFindLayer(t *testing.T) {
datastore, tx := openSessionForTest(t, "FindLayer", true) datastore, tx := openSessionForTest(t, "FindLayer", true)
defer closeTest(t, datastore, tx) defer closeTest(t, datastore, tx)
expected := database.Layer{Hash: "layer-4"} expected := database.Layer{
expectedProcessors := database.Processors{ Hash: "layer-4",
Detectors: []string{"os-release", "apt-sources"}, ProcessedBy: database.Processors{
Listers: []string{"dpkg", "rpm"}, Detectors: []string{"os-release", "apt-sources"},
Listers: []string{"dpkg", "rpm"},
},
} }
// invalid // invalid
_, _, _, err := tx.FindLayer("") _, _, err := tx.FindLayer("")
assert.NotNil(t, err) assert.NotNil(t, err)
_, _, ok, err := tx.FindLayer("layer-non") _, ok, err := tx.FindLayer("layer-non")
assert.Nil(t, err) assert.Nil(t, err)
assert.False(t, ok) assert.False(t, ok)
// valid // valid
layer, processors, ok2, err := tx.FindLayer("layer-4") layer, ok2, err := tx.FindLayer("layer-4")
if assert.Nil(t, err) && assert.True(t, ok2) { if assert.Nil(t, err) && assert.True(t, ok2) {
assert.Equal(t, expected, layer) assertLayerEqual(t, expected, layer)
assertProcessorsEqual(t, expectedProcessors, processors)
} }
} }
@ -85,6 +86,10 @@ func TestFindLayerWithContent(t *testing.T) {
expectedL := database.LayerWithContent{ expectedL := database.LayerWithContent{
Layer: database.Layer{ Layer: database.Layer{
Hash: "layer-4", Hash: "layer-4",
ProcessedBy: database.Processors{
Detectors: []string{"os-release", "apt-sources"},
Listers: []string{"dpkg", "rpm"},
},
}, },
Features: []database.Feature{ Features: []database.Feature{
{Name: "fake", Version: "2.0", VersionFormat: "rpm"}, {Name: "fake", Version: "2.0", VersionFormat: "rpm"},
@ -94,10 +99,6 @@ func TestFindLayerWithContent(t *testing.T) {
{Name: "debian:7", VersionFormat: "dpkg"}, {Name: "debian:7", VersionFormat: "dpkg"},
{Name: "fake:1.0", VersionFormat: "rpm"}, {Name: "fake:1.0", VersionFormat: "rpm"},
}, },
ProcessedBy: database.Processors{
Detectors: []string{"os-release", "apt-sources"},
Listers: []string{"dpkg", "rpm"},
},
} }
layer, ok2, err := tx.FindLayerWithContent("layer-4") layer, ok2, err := tx.FindLayerWithContent("layer-4")
@ -107,8 +108,12 @@ func TestFindLayerWithContent(t *testing.T) {
} }
func assertLayerWithContentEqual(t *testing.T, expected database.LayerWithContent, actual database.LayerWithContent) bool { func assertLayerWithContentEqual(t *testing.T, expected database.LayerWithContent, actual database.LayerWithContent) bool {
return assert.Equal(t, expected.Layer, actual.Layer) && return assertLayerEqual(t, expected.Layer, actual.Layer) &&
assertFeaturesEqual(t, expected.Features, actual.Features) && assertFeaturesEqual(t, expected.Features, actual.Features) &&
assertProcessorsEqual(t, expected.ProcessedBy, actual.ProcessedBy) &&
assertNamespacesEqual(t, expected.Namespaces, actual.Namespaces) assertNamespacesEqual(t, expected.Namespaces, actual.Namespaces)
} }
func assertLayerEqual(t *testing.T, expected database.Layer, actual database.Layer) bool {
return assertProcessorsEqual(t, expected.ProcessedBy, actual.ProcessedBy) &&
assert.Equal(t, expected.Hash, actual.Hash)
}
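For orientation, here is a minimal sketch (not part of the commit) of the call shape these tests now exercise: FindLayer drops its separate Processors return value, and listers/detectors are read from the layer itself. The stand-in types and the findLayerSession interface below are hypothetical, trimmed to the fields used in the tests above.

package sketch

import "fmt"

// Hypothetical, trimmed stand-ins for the database package types.
type Processors struct {
	Detectors []string
	Listers   []string
}

type Layer struct {
	Hash        string
	ProcessedBy Processors // previously returned separately by FindLayer
}

// findLayerSession stands in for the datastore session; only the new
// three-value FindLayer shape matters here.
type findLayerSession interface {
	FindLayer(hash string) (Layer, bool, error)
}

// printLayerProcessors reads processor information the post-change way:
// it comes back attached to the layer rather than as an extra return value.
func printLayerProcessors(tx findLayerSession, hash string) error {
	layer, ok, err := tx.FindLayer(hash)
	if err != nil || !ok {
		return err
	}
	fmt.Println(layer.Hash, layer.ProcessedBy.Detectors, layer.ProcessedBy.Listers)
	return nil
}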


@ -89,11 +89,11 @@ func init() {
UNIQUE (ancestry_id, ancestry_index));`, UNIQUE (ancestry_id, ancestry_index));`,
`CREATE INDEX ON ancestry_layer(ancestry_id);`, `CREATE INDEX ON ancestry_layer(ancestry_id);`,
`CREATE TABLE IF NOT EXISTS ancestry_feature ( `CREATE TABLE IF NOT EXISTS ancestry_feature(
id SERIAL PRIMARY KEY, id SERIAL PRIMARY KEY,
ancestry_id INT REFERENCES ancestry ON DELETE CASCADE, ancestry_layer_id INT REFERENCES ancestry_layer ON DELETE CASCADE,
namespaced_feature_id INT REFERENCES namespaced_feature ON DELETE CASCADE, namespaced_feature_id INT REFERENCES namespaced_feature ON DELETE CASCADE,
UNIQUE (ancestry_id, namespaced_feature_id));`, UNIQUE (ancestry_layer_id, namespaced_feature_id));`,
`CREATE TABLE IF NOT EXISTS ancestry_lister ( `CREATE TABLE IF NOT EXISTS ancestry_lister (
id SERIAL PRIMARY KEY, id SERIAL PRIMARY KEY,
@ -168,9 +168,9 @@ func init() {
`DROP TABLE IF EXISTS `DROP TABLE IF EXISTS
ancestry, ancestry,
ancestry_layer, ancestry_layer,
ancestry_feature,
ancestry_detector, ancestry_detector,
ancestry_lister, ancestry_lister,
ancestry_feature,
feature, feature,
namespaced_feature, namespaced_feature,
keyvalue, keyvalue,
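The migration change above is the core of the layer-wise feature work: ancestry_feature now references ancestry_layer rather than ancestry, so feature rows are scoped to the layer that introduced them. As a hypothetical illustration (not a query added by this commit), reading an ancestry's features back now takes one extra join through ancestry_layer; names follow the statements above, stored as a Go constant the way this package keeps its SQL.

package sketch

// featuresPerLayer is an illustrative query only: for a given ancestry id it
// lists each detected namespaced feature together with the index of the
// ancestry layer that introduced it.
const featuresPerLayer = `
SELECT ancestry_layer.ancestry_index, ancestry_feature.namespaced_feature_id
FROM ancestry_layer, ancestry_feature
WHERE ancestry_layer.ancestry_id = $1
  AND ancestry_feature.ancestry_layer_id = ancestry_layer.id
ORDER BY ancestry_layer.ancestry_index ASC`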


@ -196,10 +196,10 @@ const (
SELECT DISTINCT ON (a.id) SELECT DISTINCT ON (a.id)
a.id, a.name a.id, a.name
FROM vulnerability_affected_namespaced_feature AS vanf, FROM vulnerability_affected_namespaced_feature AS vanf,
ancestry AS a, ancestry_feature AS af ancestry_layer AS al, ancestry_feature AS af
WHERE vanf.vulnerability_id = $1 WHERE vanf.vulnerability_id = $1
AND a.id >= $2 AND al.ancestry_id >= $2
AND a.id = af.ancestry_id AND al.id = af.ancestry_layer_id
AND af.namespaced_feature_id = vanf.namespaced_feature_id AND af.namespaced_feature_id = vanf.namespaced_feature_id
ORDER BY a.id ASC ORDER BY a.id ASC
LIMIT $3;` LIMIT $3;`
@ -211,9 +211,9 @@ const (
WHERE NOT EXISTS (SELECT id FROM ancestry_lister WHERE ancestry_id = $1 AND lister = $2) ON CONFLICT DO NOTHING` WHERE NOT EXISTS (SELECT id FROM ancestry_lister WHERE ancestry_id = $1 AND lister = $2) ON CONFLICT DO NOTHING`
persistAncestryDetector = ` persistAncestryDetector = `
INSERT INTO ancestry_detector (ancestry_id, detector) INSERT INTO ancestry_detector (ancestry_id, detector)
SELECT CAST ($1 AS INTEGER), CAST ($2 AS TEXT) SELECT CAST ($1 AS INTEGER), CAST ($2 AS TEXT)
WHERE NOT EXISTS (SELECT id FROM ancestry_detector WHERE ancestry_id = $1 AND detector = $2) ON CONFLICT DO NOTHING` WHERE NOT EXISTS (SELECT id FROM ancestry_detector WHERE ancestry_id = $1 AND detector = $2) ON CONFLICT DO NOTHING`
insertAncestry = `INSERT INTO ancestry (name) VALUES ($1) RETURNING id` insertAncestry = `INSERT INTO ancestry (name) VALUES ($1) RETURNING id`
@ -225,20 +225,21 @@ const (
ORDER BY ancestry_layer.ancestry_index ASC` ORDER BY ancestry_layer.ancestry_index ASC`
searchAncestryFeatures = ` searchAncestryFeatures = `
SELECT namespace.name, namespace.version_format, feature.name, feature.version SELECT namespace.name, namespace.version_format, feature.name, feature.version, ancestry_layer.ancestry_index
FROM namespace, feature, ancestry, namespaced_feature, ancestry_feature FROM namespace, feature, ancestry, namespaced_feature, ancestry_layer, ancestry_feature
WHERE ancestry.name = $1 WHERE ancestry.name = $1
AND ancestry.id = ancestry_feature.ancestry_id AND ancestry.id = ancestry_layer.ancestry_id
AND ancestry_feature.namespaced_feature_id = namespaced_feature.id AND ancestry_feature.ancestry_layer_id = ancestry_layer.id
AND namespaced_feature.feature_id = feature.id AND ancestry_feature.namespaced_feature_id = namespaced_feature.id
AND namespaced_feature.namespace_id = namespace.id` AND namespaced_feature.feature_id = feature.id
AND namespaced_feature.namespace_id = namespace.id`
searchAncestry = `SELECT id FROM ancestry WHERE name = $1` searchAncestry = `SELECT id FROM ancestry WHERE name = $1`
searchAncestryDetectors = `SELECT detector FROM ancestry_detector WHERE ancestry_id = $1` searchAncestryDetectors = `SELECT detector FROM ancestry_detector WHERE ancestry_id = $1`
searchAncestryListers = `SELECT lister FROM ancestry_lister WHERE ancestry_id = $1` searchAncestryListers = `SELECT lister FROM ancestry_lister WHERE ancestry_id = $1`
removeAncestry = `DELETE FROM ancestry WHERE name = $1` removeAncestry = `DELETE FROM ancestry WHERE name = $1`
insertAncestryLayer = `INSERT INTO ancestry_layer(ancestry_id, ancestry_index, layer_id) VALUES($1,$2,$3)` insertAncestryLayer = `INSERT INTO ancestry_layer(ancestry_id, ancestry_index, layer_id) VALUES($1,$2, (SELECT layer.id FROM layer WHERE hash = $3 LIMIT 1)) RETURNING id`
insertAncestryFeature = `INSERT INTO ancestry_feature(ancestry_id, namespaced_feature_id) VALUES ($1, $2)` insertAncestryLayerFeature = `INSERT INTO ancestry_feature(ancestry_layer_id, namespaced_feature_id) VALUES ($1, $2)`
) )
// NOTE(Sida): Every search query can only have count less than postgres set // NOTE(Sida): Every search query can only have count less than postgres set
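A minimal sketch, assuming the query texts above and a plain database/sql transaction, of how the two new statements compose on the write path: insertAncestryLayer returns the ancestry_layer id, and that id keys every ancestry_feature row for the layer. Error handling is trimmed; the helper name is hypothetical.

package sketch

import "database/sql"

// Query texts copied from the constants above.
const (
	sketchInsertAncestryLayer = `INSERT INTO ancestry_layer(ancestry_id, ancestry_index, layer_id)
		VALUES($1, $2, (SELECT layer.id FROM layer WHERE hash = $3 LIMIT 1)) RETURNING id`
	sketchInsertAncestryLayerFeature = `INSERT INTO ancestry_feature(ancestry_layer_id, namespaced_feature_id)
		VALUES ($1, $2)`
)

// insertLayerWithFeatures persists one ancestry layer and attaches the given
// namespaced feature ids to it.
func insertLayerWithFeatures(tx *sql.Tx, ancestryID int64, index int, layerHash string, featureIDs []int64) error {
	var ancestryLayerID int64
	if err := tx.QueryRow(sketchInsertAncestryLayer, ancestryID, index, layerHash).Scan(&ancestryLayerID); err != nil {
		return err
	}
	for _, id := range featureIDs {
		if _, err := tx.Exec(sketchInsertAncestryLayerFeature, ancestryLayerID, id); err != nil {
			return err
		}
	}
	return nil
}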


@ -56,8 +56,8 @@ INSERT INTO layer_detector(id, layer_id, detector) VALUES
INSERT INTO ancestry (id, name) VALUES INSERT INTO ancestry (id, name) VALUES
(1, 'ancestry-1'), -- layer-0, layer-1, layer-2, layer-3a (1, 'ancestry-1'), -- layer-0, layer-1, layer-2, layer-3a
(2, 'ancestry-2'), -- layer-0, layer-1, layer-2, layer-3b (2, 'ancestry-2'), -- layer-0, layer-1, layer-2, layer-3b
(3, 'ancestry-3'), -- empty; just for testing the vulnerable ancestry (3, 'ancestry-3'), -- layer-0
(4, 'ancestry-4'); -- empty; just for testing the vulnerable ancestry (4, 'ancestry-4'); -- layer-0
INSERT INTO ancestry_lister (id, ancestry_id, lister) VALUES INSERT INTO ancestry_lister (id, ancestry_id, lister) VALUES
(1, 1, 'dpkg'), (1, 1, 'dpkg'),
@ -69,7 +69,9 @@ INSERT INTO ancestry_detector (id, ancestry_id, detector) VALUES
INSERT INTO ancestry_layer (id, ancestry_id, layer_id, ancestry_index) VALUES INSERT INTO ancestry_layer (id, ancestry_id, layer_id, ancestry_index) VALUES
(1, 1, 1, 0),(2, 1, 2, 1),(3, 1, 3, 2),(4, 1, 4, 3), (1, 1, 1, 0),(2, 1, 2, 1),(3, 1, 3, 2),(4, 1, 4, 3),
(5, 2, 1, 0),(6, 2, 2, 1),(7, 2, 3, 2),(8, 2, 5, 3); (5, 2, 1, 0),(6, 2, 2, 1),(7, 2, 3, 2),(8, 2, 5, 3),
(9, 3, 1, 0),
(10, 4, 1, 0);
INSERT INTO namespaced_feature(id, feature_id, namespace_id) VALUES INSERT INTO namespaced_feature(id, feature_id, namespace_id) VALUES
(1, 1, 1), -- wechat 0.5, debian:7 (1, 1, 1), -- wechat 0.5, debian:7
@ -77,10 +79,12 @@ INSERT INTO namespaced_feature(id, feature_id, namespace_id) VALUES
(3, 2, 2), -- openssl 1.0, debian:8 (3, 2, 2), -- openssl 1.0, debian:8
(4, 3, 1); -- openssl 2.0, debian:7 (4, 3, 1); -- openssl 2.0, debian:7
INSERT INTO ancestry_feature (id, ancestry_id, namespaced_feature_id) VALUES -- assume that ancestry-3 and ancestry-4 are vulnerable.
(1, 1, 1), (2, 1, 4), INSERT INTO ancestry_feature (id, ancestry_layer_id, namespaced_feature_id) VALUES
(3, 2, 1), (4, 2, 3), (1, 1, 1), (2, 1, 4), -- ancestry-1, layer 0 introduces 1, 4
(5, 3, 2), (6, 4, 2); -- assume that ancestry-3 and ancestry-4 are vulnerable. (3, 5, 1), (4, 5, 3), -- ancestry-2, layer 0 introduces 1, 3
(5, 9, 2), -- ancestry-3, layer 0 introduces 2
(6, 10, 2); -- ancestry-4, layer 0 introduces 2
INSERT INTO vulnerability (id, namespace_id, name, description, link, severity) VALUES INSERT INTO vulnerability (id, namespace_id, name, description, link, severity) VALUES
(1, 1, 'CVE-OPENSSL-1-DEB7', 'A vulnerability affecting OpenSSL < 2.0 on Debian 7.0', 'http://google.com/#q=CVE-OPENSSL-1-DEB7', 'High'), (1, 1, 'CVE-OPENSSL-1-DEB7', 'A vulnerability affecting OpenSSL < 2.0 on Debian 7.0', 'http://google.com/#q=CVE-OPENSSL-1-DEB7', 'High'),

worker.go

@ -160,8 +160,7 @@ func getLayer(datastore database.Datastore, req LayerRequest) (layer database.La
} }
if !ok { if !ok {
l := database.Layer{Hash: req.Hash} err = tx.PersistLayer(req.Hash)
err = tx.PersistLayer(l)
if err != nil { if err != nil {
return return
} }
@ -170,7 +169,9 @@ func getLayer(datastore database.Datastore, req LayerRequest) (layer database.La
return return
} }
layer = database.LayerWithContent{Layer: l} layer = database.LayerWithContent{}
layer.Hash = req.Hash
preq = &processRequest{ preq = &processRequest{
request: req, request: req,
notProcessedBy: Processors, notProcessedBy: Processors,
@ -313,11 +314,11 @@ func combineLayers(layer database.LayerWithContent, partial partialLayer) databa
layer.ProcessedBy.Listers = append(layer.ProcessedBy.Listers, strutil.CompareStringLists(partial.processedBy.Listers, layer.ProcessedBy.Listers)...) layer.ProcessedBy.Listers = append(layer.ProcessedBy.Listers, strutil.CompareStringLists(partial.processedBy.Listers, layer.ProcessedBy.Listers)...)
return database.LayerWithContent{ return database.LayerWithContent{
Layer: database.Layer{ Layer: database.Layer{
Hash: layer.Hash, Hash: layer.Hash,
ProcessedBy: layer.ProcessedBy,
}, },
ProcessedBy: layer.ProcessedBy, Features: features,
Features: features, Namespaces: namespaces,
Namespaces: namespaces,
} }
} }
@ -327,7 +328,7 @@ func isAncestryProcessed(datastore database.Datastore, name string) (bool, error
return false, err return false, err
} }
defer tx.Rollback() defer tx.Rollback()
_, processed, ok, err := tx.FindAncestry(name) ancestry, ok, err := tx.FindAncestry(name)
if err != nil { if err != nil {
return false, err return false, err
} }
@ -335,14 +336,20 @@ func isAncestryProcessed(datastore database.Datastore, name string) (bool, error
return false, nil return false, nil
} }
notProcessed := getNotProcessedBy(processed) notProcessed := getNotProcessedBy(ancestry.ProcessedBy)
return len(notProcessed.Detectors) == 0 && len(notProcessed.Listers) == 0, nil return len(notProcessed.Detectors) == 0 && len(notProcessed.Listers) == 0, nil
} }
// ProcessAncestry downloads and scans an ancestry if it's not scanned by all // ProcessAncestry downloads and scans an ancestry if it's not scanned by all
// enabled processors in this instance of Clair. // enabled processors in this instance of Clair.
func ProcessAncestry(datastore database.Datastore, imageFormat, name string, layerRequest []LayerRequest) error { func ProcessAncestry(datastore database.Datastore, imageFormat, name string, layerRequest []LayerRequest) error {
var err error var (
err error
ok bool
layers []database.LayerWithContent
commonProcessors database.Processors
)
if name == "" { if name == "" {
return commonerr.NewBadRequestError("could not process a layer which does not have a name") return commonerr.NewBadRequestError("could not process a layer which does not have a name")
} }
@ -351,43 +358,53 @@ func ProcessAncestry(datastore database.Datastore, imageFormat, name string, lay
return commonerr.NewBadRequestError("could not process a layer which does not have a format") return commonerr.NewBadRequestError("could not process a layer which does not have a format")
} }
if ok, err := isAncestryProcessed(datastore, name); ok && err == nil { if ok, err = isAncestryProcessed(datastore, name); err != nil {
return err
} else if ok {
log.WithField("ancestry", name).Debug("Ancestry is processed") log.WithField("ancestry", name).Debug("Ancestry is processed")
return nil return nil
} else if err != nil { }
if layers, err = processLayers(datastore, imageFormat, layerRequest); err != nil {
return err return err
} }
layers, err := processLayers(datastore, imageFormat, layerRequest) if commonProcessors, err = getProcessors(layers); err != nil {
if err != nil {
return err return err
} }
if !validateProcessors(layers) { return processAncestry(datastore, name, layers, commonProcessors)
// This error might be triggered because of multiple workers are
// processing the same instance with different processors.
return errors.New("ancestry layers are scanned with different listers and detectors")
}
return processAncestry(datastore, name, layers)
} }
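Condensed, the reshaped ProcessAncestry above is a four-step pipeline: skip work if the ancestry was already scanned by every enabled processor, scan the layers, intersect their listers/detectors, and then build and upsert the layer-wise ancestry. The skeleton below restates that flow with hypothetical stubs standing in for the real helpers, purely to make the control flow easy to follow.

package sketch

// Hypothetical trimmed stand-ins; the real types and helpers appear in the
// diff around this point.
type layerWithContent struct{}
type processors struct{ Detectors, Listers []string }

func isProcessedStub(name string) (bool, error)                                  { return false, nil }
func processLayersStub(format string) ([]layerWithContent, error)                { return nil, nil }
func commonProcessorsStub(ls []layerWithContent) (processors, error)             { return processors{}, nil }
func buildAndUpsertStub(name string, ls []layerWithContent, p processors) error  { return nil }

// processAncestryFlow mirrors the new control flow of ProcessAncestry.
func processAncestryFlow(format, name string) error {
	ok, err := isProcessedStub(name)
	if err != nil {
		return err
	}
	if ok {
		return nil // every enabled lister/detector already ran on this ancestry
	}
	layers, err := processLayersStub(format)
	if err != nil {
		return err
	}
	common, err := commonProcessorsStub(layers)
	if err != nil {
		return err
	}
	return buildAndUpsertStub(name, layers, common)
}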
func processAncestry(datastore database.Datastore, name string, layers []database.LayerWithContent) error { // getNamespacedFeatures extracts the namespaced features introduced in each
ancestryFeatures, err := computeAncestryFeatures(layers) // layer into one array.
func getNamespacedFeatures(layers []database.AncestryLayer) []database.NamespacedFeature {
features := []database.NamespacedFeature{}
for _, layer := range layers {
features = append(features, layer.DetectedFeatures...)
}
return features
}
func processAncestry(datastore database.Datastore, name string, layers []database.LayerWithContent, commonProcessors database.Processors) error {
var (
ancestry database.AncestryWithContent
err error
)
ancestry.Name = name
ancestry.ProcessedBy = commonProcessors
ancestry.Layers, err = computeAncestryLayers(layers, commonProcessors)
if err != nil { if err != nil {
return err return err
} }
ancestryLayers := make([]database.Layer, 0, len(layers)) ancestryFeatures := getNamespacedFeatures(ancestry.Layers)
for _, layer := range layers {
ancestryLayers = append(ancestryLayers, layer.Layer)
}
log.WithFields(log.Fields{ log.WithFields(log.Fields{
"ancestry": name, "ancestry": name,
"number of features": len(ancestryFeatures), "number of features": len(ancestryFeatures),
"processed by": Processors, "processed by": Processors,
"number of layers": len(ancestryLayers), "number of layers": len(ancestry.Layers),
}).Debug("compute ancestry features") }).Debug("compute ancestry features")
if err := persistNamespacedFeatures(datastore, ancestryFeatures); err != nil { if err := persistNamespacedFeatures(datastore, ancestryFeatures); err != nil {
@ -399,7 +416,7 @@ func processAncestry(datastore database.Datastore, name string, layers []databas
return err return err
} }
err = tx.UpsertAncestry(database.Ancestry{Name: name, Layers: ancestryLayers}, ancestryFeatures, Processors) err = tx.UpsertAncestry(ancestry)
if err != nil { if err != nil {
tx.Rollback() tx.Rollback()
return err return err
@ -440,44 +457,71 @@ func persistNamespacedFeatures(datastore database.Datastore, features []database
return tx.Commit() return tx.Commit()
} }
// validateProcessors checks if the layers processed by same set of processors. // getProcessors retrieves common subset of the processors of each layer.
func validateProcessors(layers []database.LayerWithContent) bool { func getProcessors(layers []database.LayerWithContent) (database.Processors, error) {
if len(layers) == 0 { if len(layers) == 0 {
return true return database.Processors{}, nil
} }
detectors := layers[0].ProcessedBy.Detectors detectors := layers[0].ProcessedBy.Detectors
listers := layers[0].ProcessedBy.Listers listers := layers[0].ProcessedBy.Listers
detectorsLen := len(detectors)
listersLen := len(listers)
for _, l := range layers[1:] { for _, l := range layers[1:] {
if len(strutil.CompareStringLists(detectors, l.ProcessedBy.Detectors)) != 0 || detectors := strutil.CompareStringListsInBoth(detectors, l.ProcessedBy.Detectors)
len(strutil.CompareStringLists(listers, l.ProcessedBy.Listers)) != 0 { listers := strutil.CompareStringListsInBoth(listers, l.ProcessedBy.Listers)
return false
if len(detectors) != detectorsLen || len(listers) != listersLen {
// This error might be triggered because multiple workers are
// processing the same instance with different processors.
// TODO(sidchen): Once the features can be associated with
// Detectors/Listers, we can support dynamically generating ancestry's
// detector/lister based on the layers.
return database.Processors{}, errors.New("processing layers with different Clair instances is currently unsupported")
} }
} }
return true return database.Processors{
Detectors: detectors,
Listers: listers,
}, nil
} }
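A self-contained sketch of what getProcessors enforces, with a plain intersection helper standing in for strutil.CompareStringListsInBoth (assumed here to return the entries common to both lists): the first layer's listers and detectors define the expected sets, and any layer whose intersection comes back smaller was scanned by a differently configured Clair instance.

package sketch

import "errors"

// intersect keeps the entries of a that also appear in b; it stands in for
// strutil.CompareStringListsInBoth in this sketch.
func intersect(a, b []string) []string {
	in := make(map[string]bool, len(b))
	for _, s := range b {
		in[s] = true
	}
	common := []string{}
	for _, s := range a {
		if in[s] {
			common = append(common, s)
		}
	}
	return common
}

// commonProcessors mirrors the check above: every layer must share the first
// layer's full set of detectors and listers, otherwise the layers came from
// differently configured workers and the ancestry cannot be assembled.
func commonProcessors(detectorsPerLayer, listersPerLayer [][]string) ([]string, []string, error) {
	if len(detectorsPerLayer) == 0 {
		return nil, nil, nil
	}
	detectors, listers := detectorsPerLayer[0], listersPerLayer[0]
	for i := 1; i < len(detectorsPerLayer); i++ {
		if len(intersect(detectors, detectorsPerLayer[i])) != len(detectors) ||
			len(intersect(listers, listersPerLayer[i])) != len(listers) {
			return nil, nil, errors.New("layers scanned with different sets of processors")
		}
	}
	return detectors, listers, nil
}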
// computeAncestryFeatures computes the features in an ancestry based on all type introducedFeature struct {
// layers. feature database.NamespacedFeature
func computeAncestryFeatures(ancestryLayers []database.LayerWithContent) ([]database.NamespacedFeature, error) { layerIndex int
}
// computeAncestryLayers computes ancestry's layers along with what features are
// introduced.
func computeAncestryLayers(layers []database.LayerWithContent, commonProcessors database.Processors) ([]database.AncestryLayer, error) {
// TODO(sidchen): Once the features are linked to specific processor, we
// will use commonProcessors to filter out the features for this ancestry.
// version format -> namespace // version format -> namespace
namespaces := map[string]database.Namespace{} namespaces := map[string]database.Namespace{}
// version format -> feature ID -> feature // version format -> feature ID -> feature
features := map[string]map[string]database.NamespacedFeature{} features := map[string]map[string]introducedFeature{}
for _, layer := range ancestryLayers { ancestryLayers := []database.AncestryLayer{}
// At start of the loop, namespaces and features always contain the for index, layer := range layers {
// previous layer's result. // Initialize the ancestry Layer
initializedLayer := database.AncestryLayer{Layer: layer.Layer, DetectedFeatures: []database.NamespacedFeature{}}
ancestryLayers = append(ancestryLayers, initializedLayer)
// Precondition: namespaces and features contain the result from union
// of all parents.
for _, ns := range layer.Namespaces { for _, ns := range layer.Namespaces {
namespaces[ns.VersionFormat] = ns namespaces[ns.VersionFormat] = ns
} }
// version format -> feature ID -> feature // version format -> feature ID -> feature
currentFeatures := map[string]map[string]database.NamespacedFeature{} currentFeatures := map[string]map[string]introducedFeature{}
for _, f := range layer.Features { for _, f := range layer.Features {
if ns, ok := namespaces[f.VersionFormat]; ok { if ns, ok := namespaces[f.VersionFormat]; ok {
var currentMap map[string]database.NamespacedFeature var currentMap map[string]introducedFeature
if currentMap, ok = currentFeatures[f.VersionFormat]; !ok { if currentMap, ok = currentFeatures[f.VersionFormat]; !ok {
currentFeatures[f.VersionFormat] = make(map[string]database.NamespacedFeature) currentFeatures[f.VersionFormat] = make(map[string]introducedFeature)
currentMap = currentFeatures[f.VersionFormat] currentMap = currentFeatures[f.VersionFormat]
} }
@ -490,9 +534,12 @@ func computeAncestryFeatures(ancestryLayers []database.LayerWithContent) ([]data
} }
if !inherited { if !inherited {
currentMap[f.Name+":"+f.Version] = database.NamespacedFeature{ currentMap[f.Name+":"+f.Version] = introducedFeature{
Feature: f, feature: database.NamespacedFeature{
Namespace: ns, Feature: f,
Namespace: ns,
},
layerIndex: index,
} }
} }
@ -513,13 +560,16 @@ func computeAncestryFeatures(ancestryLayers []database.LayerWithContent) ([]data
} }
} }
ancestryFeatures := []database.NamespacedFeature{}
for _, featureMap := range features { for _, featureMap := range features {
for _, feature := range featureMap { for _, feature := range featureMap {
ancestryFeatures = append(ancestryFeatures, feature) ancestryLayers[feature.layerIndex].DetectedFeatures = append(
ancestryLayers[feature.layerIndex].DetectedFeatures,
feature.feature,
)
} }
} }
return ancestryFeatures, nil
return ancestryLayers, nil
} }
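The essence of computeAncestryLayers is the attribution rule: a namespaced feature belongs to the lowest-index layer in which it first appears, and layers that merely carry it forward do not re-introduce it. A deliberately simplified, hypothetical sketch of that rule (ignoring version formats and namespaces) looks like this:

package sketch

// attributeFeatures maps each feature key (e.g. "name:version") to the index
// of the first layer that introduced it; later occurrences are treated as
// inherited and left untouched.
func attributeFeatures(featuresPerLayer [][]string) map[string]int {
	introducedAt := map[string]int{}
	for index, features := range featuresPerLayer {
		for _, key := range features {
			if _, inherited := introducedAt[key]; !inherited {
				introducedAt[key] = index
			}
		}
	}
	return introducedAt
}

For example, attributeFeatures([][]string{{"openssl:1.0"}, {"openssl:1.0", "wechat:0.5"}}) attributes openssl:1.0 to layer 0 and wechat:0.5 to layer 1, which is exactly the per-layer split that ends up in DetectedFeatures above.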
// getNotProcessedBy returns a processors, which contains the detectors and // getNotProcessedBy returns a processors, which contains the detectors and


@ -41,7 +41,7 @@ type mockDatastore struct {
database.MockDatastore database.MockDatastore
layers map[string]database.LayerWithContent layers map[string]database.LayerWithContent
ancestry map[string]database.AncestryWithFeatures ancestry map[string]database.AncestryWithContent
namespaces map[string]database.Namespace namespaces map[string]database.Namespace
features map[string]database.Feature features map[string]database.Feature
namespacedFeatures map[string]database.NamespacedFeature namespacedFeatures map[string]database.NamespacedFeature
@ -65,32 +65,52 @@ func copyDatastore(md *mockDatastore) mockDatastore {
layers[k] = database.LayerWithContent{ layers[k] = database.LayerWithContent{
Layer: database.Layer{ Layer: database.Layer{
Hash: l.Hash, Hash: l.Hash,
}, ProcessedBy: database.Processors{
ProcessedBy: database.Processors{ Listers: listers,
Listers: listers, Detectors: detectors,
Detectors: detectors, },
}, },
Features: features, Features: features,
Namespaces: namespaces, Namespaces: namespaces,
} }
} }
ancestry := map[string]database.AncestryWithFeatures{} ancestry := map[string]database.AncestryWithContent{}
for k, a := range md.ancestry { for k, a := range md.ancestry {
nf := append([]database.NamespacedFeature(nil), a.Features...) ancestryLayers := []database.AncestryLayer{}
l := append([]database.Layer(nil), a.Layers...) layers := []database.Layer{}
listers := append([]string(nil), a.ProcessedBy.Listers...)
detectors := append([]string(nil), a.ProcessedBy.Detectors...) for _, layer := range a.Layers {
ancestry[k] = database.AncestryWithFeatures{ layers = append(layers, database.Layer{
Hash: layer.Hash,
ProcessedBy: database.Processors{
Detectors: append([]string(nil), layer.Layer.ProcessedBy.Detectors...),
Listers: append([]string(nil), layer.Layer.ProcessedBy.Listers...),
},
})
ancestryLayers = append(ancestryLayers, database.AncestryLayer{
Layer: database.Layer{
Hash: layer.Hash,
ProcessedBy: database.Processors{
Detectors: append([]string(nil), layer.Layer.ProcessedBy.Detectors...),
Listers: append([]string(nil), layer.Layer.ProcessedBy.Listers...),
},
},
DetectedFeatures: append([]database.NamespacedFeature(nil), layer.DetectedFeatures...),
})
}
ancestry[k] = database.AncestryWithContent{
Ancestry: database.Ancestry{ Ancestry: database.Ancestry{
Name: a.Name, Name: a.Name,
Layers: l, Layers: layers,
ProcessedBy: database.Processors{
Detectors: append([]string(nil), a.ProcessedBy.Detectors...),
Listers: append([]string(nil), a.ProcessedBy.Listers...),
},
}, },
ProcessedBy: database.Processors{ Layers: ancestryLayers,
Detectors: detectors,
Listers: listers,
},
Features: nf,
} }
} }
@ -121,7 +141,7 @@ func newMockDatastore() *mockDatastore {
errSessionDone := errors.New("Session Done") errSessionDone := errors.New("Session Done")
md := &mockDatastore{ md := &mockDatastore{
layers: make(map[string]database.LayerWithContent), layers: make(map[string]database.LayerWithContent),
ancestry: make(map[string]database.AncestryWithFeatures), ancestry: make(map[string]database.AncestryWithContent),
namespaces: make(map[string]database.Namespace), namespaces: make(map[string]database.Namespace),
features: make(map[string]database.Feature), features: make(map[string]database.Feature),
namespacedFeatures: make(map[string]database.NamespacedFeature), namespacedFeatures: make(map[string]database.NamespacedFeature),
@ -156,22 +176,20 @@ func newMockDatastore() *mockDatastore {
return nil return nil
} }
session.FctFindAncestry = func(name string) (database.Ancestry, database.Processors, bool, error) { session.FctFindAncestry = func(name string) (database.Ancestry, bool, error) {
processors := database.Processors{}
if session.terminated { if session.terminated {
return database.Ancestry{}, processors, false, errSessionDone return database.Ancestry{}, false, errSessionDone
} }
ancestry, ok := session.copy.ancestry[name] ancestry, ok := session.copy.ancestry[name]
return ancestry.Ancestry, ancestry.ProcessedBy, ok, nil return ancestry.Ancestry, ok, nil
} }
session.FctFindLayer = func(name string) (database.Layer, database.Processors, bool, error) { session.FctFindLayer = func(name string) (database.Layer, bool, error) {
processors := database.Processors{}
if session.terminated { if session.terminated {
return database.Layer{}, processors, false, errSessionDone return database.Layer{}, false, errSessionDone
} }
layer, ok := session.copy.layers[name] layer, ok := session.copy.layers[name]
return layer.Layer, layer.ProcessedBy, ok, nil return layer.Layer, ok, nil
} }
session.FctFindLayerWithContent = func(name string) (database.LayerWithContent, bool, error) { session.FctFindLayerWithContent = func(name string) (database.LayerWithContent, bool, error) {
@ -182,12 +200,12 @@ func newMockDatastore() *mockDatastore {
return layer, ok, nil return layer, ok, nil
} }
session.FctPersistLayer = func(layer database.Layer) error { session.FctPersistLayer = func(hash string) error {
if session.terminated { if session.terminated {
return errSessionDone return errSessionDone
} }
if _, ok := session.copy.layers[layer.Hash]; !ok { if _, ok := session.copy.layers[hash]; !ok {
session.copy.layers[layer.Hash] = database.LayerWithContent{Layer: layer} session.copy.layers[hash] = database.LayerWithContent{Layer: database.Layer{Hash: hash}}
} }
return nil return nil
} }
@ -267,25 +285,20 @@ func newMockDatastore() *mockDatastore {
return nil return nil
} }
session.FctUpsertAncestry = func(ancestry database.Ancestry, features []database.NamespacedFeature, processors database.Processors) error { session.FctUpsertAncestry = func(ancestry database.AncestryWithContent) error {
if session.terminated { if session.terminated {
return errSessionDone return errSessionDone
} }
features := getNamespacedFeatures(ancestry.Layers)
// ensure features are in the database // ensure features are in the database
for _, f := range features { for _, f := range features {
if _, ok := session.copy.namespacedFeatures[NamespacedFeatureKey(&f)]; !ok { if _, ok := session.copy.namespacedFeatures[NamespacedFeatureKey(&f)]; !ok {
return errors.New("namepsaced feature not in db") return errors.New("namespaced feature not in db")
} }
} }
ancestryWFeature := database.AncestryWithFeatures{ session.copy.ancestry[ancestry.Name] = ancestry
Ancestry: ancestry,
Features: features,
ProcessedBy: processors,
}
session.copy.ancestry[ancestry.Name] = ancestryWFeature
return nil return nil
} }
@ -359,9 +372,11 @@ func TestProcessAncestryWithDistUpgrade(t *testing.T) {
} }
assert.Nil(t, ProcessAncestry(datastore, "Docker", "Mock", layers)) assert.Nil(t, ProcessAncestry(datastore, "Docker", "Mock", layers))
// check the ancestry features // check the ancestry features
assert.Len(t, datastore.ancestry["Mock"].Features, 74) features := getNamespacedFeatures(datastore.ancestry["Mock"].Layers)
for _, f := range datastore.ancestry["Mock"].Features { assert.Len(t, features, 74)
for _, f := range features {
if _, ok := nonUpgradedMap[f.Feature]; ok { if _, ok := nonUpgradedMap[f.Feature]; ok {
assert.Equal(t, "debian:7", f.Namespace.Name) assert.Equal(t, "debian:7", f.Namespace.Name)
} else { } else {
@ -388,20 +403,20 @@ func TestProcessLayers(t *testing.T) {
{Hash: "jessie", Path: testDataPath + "jessie.tar.gz"}, {Hash: "jessie", Path: testDataPath + "jessie.tar.gz"},
} }
processedLayers, err := processLayers(datastore, "Docker", layers) LayerWithContents, err := processLayers(datastore, "Docker", layers)
assert.Nil(t, err) assert.Nil(t, err)
assert.Len(t, processedLayers, 3) assert.Len(t, LayerWithContents, 3)
// ensure resubmit won't break the stuff // ensure resubmit won't break the stuff
processedLayers, err = processLayers(datastore, "Docker", layers) LayerWithContents, err = processLayers(datastore, "Docker", layers)
assert.Nil(t, err) assert.Nil(t, err)
assert.Len(t, processedLayers, 3) assert.Len(t, LayerWithContents, 3)
// Ensure each processed layer is correct // Ensure each processed layer is correct
assert.Len(t, processedLayers[0].Namespaces, 0) assert.Len(t, LayerWithContents[0].Namespaces, 0)
assert.Len(t, processedLayers[1].Namespaces, 1) assert.Len(t, LayerWithContents[1].Namespaces, 1)
assert.Len(t, processedLayers[2].Namespaces, 1) assert.Len(t, LayerWithContents[2].Namespaces, 1)
assert.Len(t, processedLayers[0].Features, 0) assert.Len(t, LayerWithContents[0].Features, 0)
assert.Len(t, processedLayers[1].Features, 52) assert.Len(t, LayerWithContents[1].Features, 52)
assert.Len(t, processedLayers[2].Features, 74) assert.Len(t, LayerWithContents[2].Features, 74)
// Ensure each layer has expected namespaces and features detected // Ensure each layer has expected namespaces and features detected
if blank, ok := datastore.layers["blank"]; ok { if blank, ok := datastore.layers["blank"]; ok {
@ -462,10 +477,10 @@ func TestClairUpgrade(t *testing.T) {
} }
assert.Nil(t, ProcessAncestry(datastore, "Docker", "Mock", layers)) assert.Nil(t, ProcessAncestry(datastore, "Docker", "Mock", layers))
assert.Len(t, datastore.ancestry["Mock"].Features, 0) assert.Len(t, getNamespacedFeatures(datastore.ancestry["Mock"].Layers), 0)
assert.Nil(t, ProcessAncestry(datastore, "Docker", "Mock2", layers2)) assert.Nil(t, ProcessAncestry(datastore, "Docker", "Mock2", layers2))
assert.Len(t, datastore.ancestry["Mock2"].Features, 0) assert.Len(t, getNamespacedFeatures(datastore.ancestry["Mock2"].Layers), 0)
// Clair is upgraded to use a new namespace detector. The expected // Clair is upgraded to use a new namespace detector. The expected
// behavior is that all layers will be rescanned with "apt-sources" and // behavior is that all layers will be rescanned with "apt-sources" and
@ -478,7 +493,7 @@ func TestClairUpgrade(t *testing.T) {
// Even though Clair processors are upgraded, the ancestry's features should // Even though Clair processors are upgraded, the ancestry's features should
// not be upgraded without posting the ancestry to Clair again. // not be upgraded without posting the ancestry to Clair again.
assert.Nil(t, ProcessAncestry(datastore, "Docker", "Mock", layers)) assert.Nil(t, ProcessAncestry(datastore, "Docker", "Mock", layers))
assert.Len(t, datastore.ancestry["Mock"].Features, 0) assert.Len(t, getNamespacedFeatures(datastore.ancestry["Mock"].Layers), 0)
// Clair is upgraded to use a new feature lister. The expected behavior is // Clair is upgraded to use a new feature lister. The expected behavior is
// that all layers will be rescanned with "dpkg" and the ancestry's features // that all layers will be rescanned with "dpkg" and the ancestry's features
@ -489,18 +504,18 @@ func TestClairUpgrade(t *testing.T) {
} }
assert.Nil(t, ProcessAncestry(datastore, "Docker", "Mock", layers)) assert.Nil(t, ProcessAncestry(datastore, "Docker", "Mock", layers))
assert.Len(t, datastore.ancestry["Mock"].Features, 74) assert.Len(t, getNamespacedFeatures(datastore.ancestry["Mock"].Layers), 74)
assert.Nil(t, ProcessAncestry(datastore, "Docker", "Mock2", layers2)) assert.Nil(t, ProcessAncestry(datastore, "Docker", "Mock2", layers2))
assert.Len(t, datastore.ancestry["Mock2"].Features, 52) assert.Len(t, getNamespacedFeatures(datastore.ancestry["Mock2"].Layers), 52)
// check the namespaces are correct // check the namespaces are correct
for _, f := range datastore.ancestry["Mock"].Features { for _, f := range getNamespacedFeatures(datastore.ancestry["Mock"].Layers) {
if !assert.NotEqual(t, database.Namespace{}, f.Namespace) { if !assert.NotEqual(t, database.Namespace{}, f.Namespace) {
assert.Fail(t, "Every feature should have a namespace attached") assert.Fail(t, "Every feature should have a namespace attached")
} }
} }
for _, f := range datastore.ancestry["Mock2"].Features { for _, f := range getNamespacedFeatures(datastore.ancestry["Mock2"].Layers) {
if !assert.NotEqual(t, database.Namespace{}, f.Namespace) { if !assert.NotEqual(t, database.Namespace{}, f.Namespace) {
assert.Fail(t, "Every feature should have a namespace attached") assert.Fail(t, "Every feature should have a namespace attached")
} }
@ -624,8 +639,9 @@ func TestComputeAncestryFeatures(t *testing.T) {
}: false, }: false,
} }
features, err := computeAncestryFeatures(layers) ancestryLayers, err := computeAncestryLayers(layers, database.Processors{})
assert.Nil(t, err) assert.Nil(t, err)
features := getNamespacedFeatures(ancestryLayers)
for _, f := range features { for _, f := range features {
if assert.Contains(t, expected, f) { if assert.Contains(t, expected, f) {
if assert.False(t, expected[f]) { if assert.False(t, expected[f]) {