Commit 15bdb9e: WIP

d0g0x01 committed Aug 4, 2023
1 parent 1f81db5
Showing 6 changed files with 216 additions and 132 deletions.
4 changes: 4 additions & 0 deletions Makefile
@@ -119,6 +119,10 @@ test: ## Run the full suite of unit tests
system-test: | backend-reset ## Run the system tests
cd test/system && export KUBECONFIG=$(ROOT_DIR)/test/setup/${KIND_KUBECONFIG} && go test -v -timeout "60s" -count=1 ./...

.PHONY: system-test-fast
system-test-fast: ## Run the system tests without resetting the backend
cd test/system && export KUBECONFIG=$(ROOT_DIR)/test/setup/${KIND_KUBECONFIG} && go test -v -timeout "60s" -count=1 ./...

.PHONY: local-cluster-deploy
local-cluster-deploy: ## Create a kind cluster with some vulnerable resources (pods, roles, ...)
bash test/setup/manage-cluster.sh destroy
2 changes: 1 addition & 1 deletion deployments/kubehound/docker-compose.yaml
@@ -48,7 +48,7 @@ services:
- gremlinserver.threadPoolWorker=8 # should be 2x VCPU (TODO: can we set dynamically?)
healthcheck:
test: ["CMD", "bin/gremlin.sh", "-e", "scripts/remote-connect.groovy"]
interval: 60s
interval: 30s
timeout: 30s
retries: 3
labels:
251 changes: 130 additions & 121 deletions pkg/kubehound/ingestor/pipeline/endpoint_ingest_test.go
@@ -1,123 +1,132 @@
package pipeline

// import (
// "context"
// "testing"

// "github.com/DataDog/KubeHound/pkg/collector"
// mockcollect "github.com/DataDog/KubeHound/pkg/collector/mockcollector"
// "github.com/DataDog/KubeHound/pkg/config"
// "github.com/DataDog/KubeHound/pkg/globals/types"
// "github.com/DataDog/KubeHound/pkg/kubehound/models/store"
// "github.com/DataDog/KubeHound/pkg/kubehound/storage/cache"
// "github.com/DataDog/KubeHound/pkg/kubehound/storage/cache/cachekey"
// mockcache "github.com/DataDog/KubeHound/pkg/kubehound/storage/cache/mocks"
// graphdb "github.com/DataDog/KubeHound/pkg/kubehound/storage/graphdb/mocks"
// storedb "github.com/DataDog/KubeHound/pkg/kubehound/storage/storedb/mocks"
// "github.com/DataDog/KubeHound/pkg/kubehound/store/collections"
// "github.com/stretchr/testify/assert"
// "github.com/stretchr/testify/mock"
// )

// func TestRoleBindingIngest_Pipeline(t *testing.T) {
// ri := &RoleBindingIngest{}

// ctx := context.Background()
// fakeRb, err := loadTestObject[types.RoleBindingType]("testdata/rolebinding.json")
// assert.NoError(t, err)

// client := mockcollect.NewCollectorClient(t)
// client.EXPECT().StreamRoleBindings(ctx, ri).
// RunAndReturn(func(ctx context.Context, i collector.RoleBindingIngestor) error {
// // Fake the stream of a single role binding from the collector client
// err := i.IngestRoleBinding(ctx, fakeRb)
// if err != nil {
// return err
// }

// return i.Complete(ctx)
// })

// // Cache setup
// c := mockcache.NewCacheProvider(t)

// c.EXPECT().Get(ctx, cachekey.Identity("app-monitors", "test-app")).Return(&cache.CacheResult{
// Value: nil,
// Err: cache.ErrNoEntry,
// }).Once()
// c.EXPECT().Get(ctx, cachekey.Role("test-reader", "test-app")).Return(&cache.CacheResult{
// Value: store.ObjectID().Hex(),
// Err: nil,
// }).Once()

// // Store setup - rolebindings
// sdb := storedb.NewProvider(t)
// rsw := storedb.NewAsyncWriter(t)
// crbs := collections.RoleBinding{}
// rsw.EXPECT().Queue(ctx, mock.AnythingOfType("*store.RoleBinding")).Return(nil).Once()
// rsw.EXPECT().Flush(ctx).Return(nil)
// rsw.EXPECT().Close(ctx).Return(nil)
// sdb.EXPECT().BulkWriter(ctx, crbs, mock.Anything).Return(rsw, nil)

// // Store setup - identities
// isw := storedb.NewAsyncWriter(t)
// csw := mockcache.NewAsyncWriter(t)
// csw.EXPECT().Queue(ctx, mock.AnythingOfType("*cachekey.identityCacheKey"), mock.AnythingOfType("string")).Return(nil)
// csw.EXPECT().Flush(ctx).Return(nil)
// csw.EXPECT().Close(ctx).Return(nil)

// identities := collections.Identity{}
// storeId := store.ObjectID()
// isw.EXPECT().Queue(ctx, mock.AnythingOfType("*store.Identity")).
// RunAndReturn(func(ctx context.Context, i any) error {
// i.(*store.Identity).Id = storeId
// return nil
// }).Once()
// isw.EXPECT().Flush(ctx).Return(nil)
// isw.EXPECT().Close(ctx).Return(nil)
// sdb.EXPECT().BulkWriter(ctx, identities, mock.Anything).Return(isw, nil)
// c.EXPECT().BulkWriter(ctx, mock.AnythingOfType("cache.WriterOption")).Return(csw, nil)

// // Graph setup
// vtxInsert := map[string]any{
// "isNamespaced": true,
// "critical": false,
// "name": "app-monitors",
// "namespace": "test-app",
// "storeID": storeId.Hex(),
// "type": "ServiceAccount",
// "team": "test-team",
// "app": "test-app",
// "service": "test-service",
// }
// gdb := graphdb.NewProvider(t)
// gw := graphdb.NewAsyncVertexWriter(t)
// gw.EXPECT().Queue(ctx, vtxInsert).Return(nil).Once()
// gw.EXPECT().Flush(ctx).Return(nil)
// gw.EXPECT().Close(ctx).Return(nil)
// gdb.EXPECT().VertexWriter(ctx, mock.AnythingOfType("*vertex.Identity"), c, mock.AnythingOfType("graphdb.WriterOption")).Return(gw, nil)

// deps := &Dependencies{
// Collector: client,
// Cache: c,
// GraphDB: gdb,
// StoreDB: sdb,
// Config: &config.KubehoundConfig{
// Builder: config.BuilderConfig{
// Edge: config.EdgeBuilderConfig{},
// },
// },
// }

// // Initialize
// err = ri.Initialize(ctx, deps)
// assert.NoError(t, err)

// // Run
// err = ri.Run(ctx)
// assert.NoError(t, err)

// // Close
// err = ri.Close(ctx)
// assert.NoError(t, err)
// }
import (
"context"
"testing"

"github.com/DataDog/KubeHound/pkg/collector"
mockcollect "github.com/DataDog/KubeHound/pkg/collector/mockcollector"
"github.com/DataDog/KubeHound/pkg/config"
"github.com/DataDog/KubeHound/pkg/globals/types"
"github.com/DataDog/KubeHound/pkg/kubehound/models/shared"
"github.com/DataDog/KubeHound/pkg/kubehound/models/store"
mockcache "github.com/DataDog/KubeHound/pkg/kubehound/storage/cache/mocks"
graphdb "github.com/DataDog/KubeHound/pkg/kubehound/storage/graphdb/mocks"
storedb "github.com/DataDog/KubeHound/pkg/kubehound/storage/storedb/mocks"
"github.com/DataDog/KubeHound/pkg/kubehound/store/collections"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)

func TestEndpointSlice_Pipeline(t *testing.T) {
ei := &EndpointIngest{}

ctx := context.Background()
fakeEp, err := loadTestObject[types.EndpointType]("testdata/endpointslice.json")
assert.NoError(t, err)

client := mockcollect.NewCollectorClient(t)
client.EXPECT().StreamEndpoints(ctx, ei).
RunAndReturn(func(ctx context.Context, i collector.EndpointIngestor) error {
// Fake the stream of a single endpoint slice from the collector client
err := i.IngestEndpoint(ctx, fakeEp)
if err != nil {
return err
}

return i.Complete(ctx)
})

// Cache setup
c := mockcache.NewCacheProvider(t)
cw := mockcache.NewAsyncWriter(t)
cw.EXPECT().Queue(ctx, mock.AnythingOfType("*cachekey.endpointCacheKey"), true).Return(nil).Times(2)
cw.EXPECT().Flush(ctx).Return(nil)
cw.EXPECT().Close(ctx).Return(nil)
c.EXPECT().BulkWriter(ctx, mock.AnythingOfType("cache.WriterOption")).Return(cw, nil)

// Store setup
sdb := storedb.NewProvider(t)
sw := storedb.NewAsyncWriter(t)
endpoints := collections.Endpoint{}
storeId := store.ObjectID()
sw.EXPECT().Queue(ctx, mock.AnythingOfType("*store.Endpoint")).
RunAndReturn(func(ctx context.Context, i any) error {
i.(*store.Endpoint).Id = storeId
return nil
}).Times(2)

sw.EXPECT().Flush(ctx).Return(nil)
sw.EXPECT().Close(ctx).Return(nil)
sdb.EXPECT().BulkWriter(ctx, endpoints, mock.Anything).Return(sw, nil)

// Graph setup
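// The endpointslice.json fixture exposes two ports (cql and jmx) on a single
// address, so the pipeline is expected to produce two Endpoint vertices
// (matching the Times(2) expectations on the store and cache writers above).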
vtx1 := map[string]interface{}{
"addressType": "IPv4",
"addresses": []interface{}{"10.1.1.1"},
"app": "cassandra",
"compromised": float64(0),
"exposure": float64(shared.EndpointExposureExternal),
"isNamespaced": true,
"name": "cassandra-temporal-dev-kmwfp::TCP::cql",
"namespace": "cassandra-temporal-dev",
"port": float64(9042),
"portName": "cql",
"protocol": "TCP",
"service": "cassandra",
"serviceDns": "cassandra-temporal-dev.cassandra-temporal-dev.stripe.us1.staging.dog",
"serviceEndpoint": "cassandra-temporal-dev",
"storeID": storeId.Hex(),
"team": "workflow-engine",
}
vtx2 := map[string]interface{}{
"addressType": "IPv4",
"addresses": []interface{}{"10.1.1.1"},
"app": "cassandra",
"compromised": float64(0),
"exposure": float64(shared.EndpointExposureExternal),
"isNamespaced": true,
"name": "cassandra-temporal-dev-kmwfp::TCP::jmx",
"namespace": "cassandra-temporal-dev",
"port": float64(7199),
"portName": "jmx",
"protocol": "TCP",
"service": "cassandra",
"serviceDns": "cassandra-temporal-dev.cassandra-temporal-dev.stripe.us1.staging.dog",
"serviceEndpoint": "cassandra-temporal-dev",
"storeID": storeId.Hex(),
"team": "workflow-engine",
}

gdb := graphdb.NewProvider(t)
gw := graphdb.NewAsyncVertexWriter(t)
gw.EXPECT().Queue(ctx, vtx1).Return(nil).Once()
gw.EXPECT().Queue(ctx, vtx2).Return(nil).Once()
gw.EXPECT().Flush(ctx).Return(nil)
gw.EXPECT().Close(ctx).Return(nil)
gdb.EXPECT().VertexWriter(ctx, mock.AnythingOfType("*vertex.Endpoint"), c, mock.AnythingOfType("graphdb.WriterOption")).Return(gw, nil)

deps := &Dependencies{
Collector: client,
Cache: c,
GraphDB: gdb,
StoreDB: sdb,
Config: &config.KubehoundConfig{
Builder: config.BuilderConfig{
Edge: config.EdgeBuilderConfig{},
},
},
}

// Initialize
err = ei.Initialize(ctx, deps)
assert.NoError(t, err)

// Run
err = ei.Run(ctx)
assert.NoError(t, err)

// Close
err = ei.Close(ctx)
assert.NoError(t, err)
}
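The test above turns the JSON fixture into a typed Kubernetes object via a generic loadTestObject helper defined elsewhere in the pipeline package. As a minimal sketch of how such a fixture loader could look (an assumption for illustration; the helper the repository actually uses may have a different signature or behaviour):

package pipeline

import (
	"encoding/json"
	"os"
)

// loadTestObject reads a JSON fixture from testdata and unmarshals it into
// the requested type T. Minimal sketch under assumptions; the helper the
// test actually calls lives elsewhere in this package and may differ.
func loadTestObject[T any](path string) (T, error) {
	var obj T
	data, err := os.ReadFile(path)
	if err != nil {
		return obj, err
	}
	err = json.Unmarshal(data, &obj)
	return obj, err
}

If types.EndpointType is a pointer alias, json.Unmarshal allocates the underlying object when handed the address of the nil pointer, so the same generic loader works for both value and pointer fixture types.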
71 changes: 71 additions & 0 deletions pkg/kubehound/ingestor/pipeline/testdata/endpointslice.json
@@ -0,0 +1,71 @@
{
"addressType": "IPv4",
"apiVersion": "discovery.k8s.io/v1",
"endpoints": [
{
"addresses": [
"10.1.1.1"
],
"conditions": {
"ready": true,
"serving": true,
"terminating": false
},
"hostname": "cassandra-temporal",
"nodeName": "node.ec2.internal",
"targetRef": {
"kind": "Pod",
"name": "cassandra-temporal",
"namespace": "cassandra-temporal-dev",
"uid": "230ffb57-70f4-4eb2-a12a-d27e280e580e"
},
"zone": "us-east-1c"
}
],
"kind": "EndpointSlice",
"metadata": {
"annotations": {
"endpoints.kubernetes.io/last-change-trigger-time": "2023-07-31T23:45:17Z"
},
"creationTimestamp": "2023-05-09T22:23:17Z",
"generateName": "cassandra-temporal-dev-",
"generation": 582,
"labels": {
"app": "cassandra",
"chart_name": "cassandra",
"cluster": "cassandra-temporal-dev",
"endpointslice.kubernetes.io/managed-by": "endpointslice-controller.k8s.io",
"kubernetes.io/service-name": "cassandra-temporal-dev",
"name": "cassandra-temporal-dev",
"service": "cassandra",
"service.kubernetes.io/headless": "",
"team": "workflow-engine"
},
"name": "cassandra-temporal-dev-kmwfp",
"namespace": "cassandra-temporal-dev",
"ownerReferences": [
{
"apiVersion": "v1",
"blockOwnerDeletion": true,
"controller": true,
"kind": "Service",
"name": "cassandra-temporal-dev",
"uid": "5f7906ca-411c-43d7-9dcf-8ef388a2fd94"
}
],
"resourceVersion": "19403674728",
"uid": "678ebe2b-1565-4700-8d8c-8efebaa4deba"
},
"ports": [
{
"name": "cql",
"port": 9042,
"protocol": "TCP"
},
{
"name": "jmx",
"port": 7199,
"protocol": "TCP"
}
]
}
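The two entries under "ports" are what drive the two vertex expectations in the test above: each port in the slice becomes its own Endpoint vertex, named <slice>::<protocol>::<portName>. A small standalone illustration of that fan-out, assuming the name format follows the vtx1/vtx2 values (the real vertex-building code lives elsewhere in KubeHound and may differ):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	discoveryv1 "k8s.io/api/discovery/v1"
)

// endpointNames illustrates the per-port fan-out: one name per port in the
// slice, in the "<slice>::<protocol>::<portName>" form used by vtx1/vtx2.
// Nil checks are omitted for brevity; this is an illustration, not the
// repository's ingestion logic.
func endpointNames(eps *discoveryv1.EndpointSlice) []string {
	names := make([]string, 0, len(eps.Ports))
	for _, p := range eps.Ports {
		names = append(names, fmt.Sprintf("%s::%s::%s", eps.Name, *p.Protocol, *p.Name))
	}
	return names
}

func main() {
	tcp := corev1.ProtocolTCP
	cql, jmx := "cql", "jmx"
	eps := &discoveryv1.EndpointSlice{}
	eps.Name = "cassandra-temporal-dev-kmwfp"
	eps.Ports = []discoveryv1.EndpointPort{
		{Name: &cql, Protocol: &tcp},
		{Name: &jmx, Protocol: &tcp},
	}
	// Prints: [cassandra-temporal-dev-kmwfp::TCP::cql cassandra-temporal-dev-kmwfp::TCP::jmx]
	fmt.Println(endpointNames(eps))
}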
16 changes: 8 additions & 8 deletions test/setup/test-cluster/attacks/TOKEN_VAR_LOG_SYMLINK.yaml
@@ -9,7 +9,7 @@ apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
namespace: default
name: varlog
name: read-logs
rules:
- apiGroups: [""] # "" indicates the core API group
resources: ["pods/log", "pods", "nodes", "nodes/log"]
@@ -18,16 +18,16 @@ rules:
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: varlog
name: pod-read-logs
namespace: default
subjects:
- kind: ServiceAccount
name: varlog-sa
apiGroup: ""
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: Role
name: varlog
apiGroup: ""
name: read-logs
subjects:
- kind: ServiceAccount
name: varlog-sa
namespace: default
---
apiVersion: v1
kind: Pod
4 changes: 2 additions & 2 deletions test/system/vertex.gen.go
@@ -1,5 +1,5 @@
// PLEASE DO NOT EDIT
// THIS HAS BEEN GENERATED AUTOMATICALLY on 2023-08-03 15:20
// THIS HAS BEEN GENERATED AUTOMATICALLY on 2023-08-04 12:14
//
// Generate it with "go generate ./..."
//
@@ -307,7 +307,7 @@ var expectedContainers = map[string]graph.Container{
Command: []string{},
Args: []string{},
Capabilities: []string{},
Privileged: false,
Privileged: true,
PrivEsc: false,
HostPID: false,
HostIPC: false,
