From c519f8e5720afee7caf4423cba253ab21b068fde Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 16 Dec 2024 17:32:22 +0200 Subject: [PATCH 01/76] added source and destination Groups to Rule --- pkg/model/parser.go | 53 ++++++++++++++++++++++++++++----------------- 1 file changed, 33 insertions(+), 20 deletions(-) diff --git a/pkg/model/parser.go b/pkg/model/parser.go index 26e7128..cd41779 100644 --- a/pkg/model/parser.go +++ b/pkg/model/parser.go @@ -42,6 +42,7 @@ type NSXConfigParser struct { file string rc *collector.ResourcesContainerModel configRes *config + groups []*collector.Group allGroupsVMs []*endpoints.VM // store references to groups/services objects from paths used in fw rules groupPathsToObjects map[string]*collector.Group @@ -110,7 +111,7 @@ func (p *NSXConfigParser) getDFW() { // more fields to consider: sequence_number , stateful,tcp_strict, unique_id // This scope will take precedence over rule level scope. - scope := p.getEndpointsFromGroupsPaths(secPolicy.Scope) + scope, _ := p.getEndpointsFromGroupsPaths(secPolicy.Scope) policyHasScope := !slices.Equal(secPolicy.Scope, []string{anyStr}) rules := secPolicy.Rules @@ -120,7 +121,7 @@ func (p *NSXConfigParser) getDFW() { r.scope = scope // scope from policy if !policyHasScope { // if policy scope is not configured, rule's scope takes effect - r.scope = p.getEndpointsFromGroupsPaths(rule.Scope) + r.scope, _ = p.getEndpointsFromGroupsPaths(rule.Scope) } r.secPolicyName = *secPolicy.DisplayName p.addFWRule(r, category, rule) @@ -158,10 +159,12 @@ func (p *NSXConfigParser) getDefaultRule(secPolicy *collector.SecurityPolicy) *p res := &parsedRule{} // scope - the list of group paths where the rules in this policy will get applied. scope := secPolicy.Scope - vms := p.getEndpointsFromGroupsPaths(scope) + vms, groups := p.getEndpointsFromGroupsPaths(scope) // rule applied as any-to-any only for ths VMs in the scope of the SecurityPolicy res.srcVMs = vms res.dstVMs = vms + res.SrcGroups = groups + res.DstGroup = groups switch string(*secPolicy.ConnectivityPreference) { case string(nsx.SecurityPolicyConnectivityPreferenceALLOWLIST), @@ -183,8 +186,12 @@ func (p *NSXConfigParser) getDefaultRule(secPolicy *collector.SecurityPolicy) *p } type parsedRule struct { - srcVMs []*endpoints.VM - dstVMs []*endpoints.VM + srcVMs []*endpoints.VM + dstVMs []*endpoints.VM + // todo: In this stage we are not analyzing the complete expr, yet. In this stage we will only handle src and dst + // defined by groups, thus the following SrcGroups and DstGroup + SrcGroups []*collector.Group + DstGroup []*collector.Group action string conn *netset.TransportSet direction string @@ -194,32 +201,38 @@ type parsedRule struct { defaultRuleObj *collector.FirewallRule } -func (p *NSXConfigParser) allGroups() []*endpoints.VM { +func (p *NSXConfigParser) allGroups() ([]*endpoints.VM, []*collector.Group) { + // p.allGroupsVMs and p.groups are written together if len(p.allGroupsVMs) > 0 { - return p.allGroupsVMs + return p.allGroupsVMs, p.groups } - res := []*endpoints.VM{} + vms := []*endpoints.VM{} + groups := []*collector.Group{} for i := range p.rc.DomainList { domainRsc := &p.rc.DomainList[i].Resources for j := range domainRsc.GroupList { - res = append(res, p.groupToVMsList(&domainRsc.GroupList[j])...) + vms = append(vms, p.groupToVMsList(&domainRsc.GroupList[j])...) 
+ groups = append(groups, &domainRsc.GroupList[j]) } } - p.allGroupsVMs = res - return res + p.allGroupsVMs = vms + return vms, groups } -func (p *NSXConfigParser) getEndpointsFromGroupsPaths(groupsPaths []string) []*endpoints.VM { +func (p *NSXConfigParser) getEndpointsFromGroupsPaths(groupsPaths []string) ([]*endpoints.VM, []*collector.Group) { if slices.Contains(groupsPaths, anyStr) { // TODO: if a VM is not within any group, this should not include that VM? return p.allGroups() // all groups } - res := []*endpoints.VM{} + vms := []*endpoints.VM{} + groups := []*collector.Group{} // TODO: support IP Addresses in groupsPaths for _, groupPath := range groupsPaths { - res = append(res, p.getGroupVMs(groupPath)...) + thisGroupVMs, thisGroup := p.getGroupVMs(groupPath) + vms = append(vms, thisGroupVMs...) + groups = append(groups, thisGroup) } - return res + return vms, groups } // type *collector.FirewallRule is deprecated but used to collect default rule per securityPolicy @@ -238,9 +251,9 @@ func (p *NSXConfigParser) getDFWRule(rule *collector.Rule) *parsedRule { // the source groups. If false, the rule applies to the source groups // TODO: handle excluded fields // srcExclude := rule.SourcesExcluded - res.srcVMs = p.getEndpointsFromGroupsPaths(srcGroups) + res.srcVMs, res.SrcGroups = p.getEndpointsFromGroupsPaths(srcGroups) dstGroups := rule.DestinationGroups - res.dstVMs = p.getEndpointsFromGroupsPaths(dstGroups) + res.dstVMs, res.DstGroup = p.getEndpointsFromGroupsPaths(dstGroups) res.action = string(*rule.Action) res.conn = p.getRuleConnections(rule) @@ -371,7 +384,7 @@ func (p *NSXConfigParser) groupToVMsList(group *collector.Group) []*endpoints.VM return res } -func (p *NSXConfigParser) getGroupVMs(groupPath string) []*endpoints.VM { +func (p *NSXConfigParser) getGroupVMs(groupPath string) ([]*endpoints.VM, *collector.Group) { for i := range p.rc.DomainList { domainRsc := p.rc.DomainList[i].Resources for j := range domainRsc.GroupList { @@ -380,11 +393,11 @@ func (p *NSXConfigParser) getGroupVMs(groupPath string) []*endpoints.VM { if _, ok := p.groupPathsToObjects[groupPath]; !ok { p.groupPathsToObjects[groupPath] = g } - return p.groupToVMsList(g) + return p.groupToVMsList(g), g } } } - return nil // could not find given groupPath (add warning) + return nil, nil // could not find given groupPath (add warning) } ///////////////////////////////////////////////////////////////////////////////////////////////////////////// From 89ffc7dc2e64a731780350fd8cbb2697197f52a5 Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 17 Dec 2024 12:50:22 +0200 Subject: [PATCH 02/76] added source and destination Groups to FwRule --- pkg/model/dfw/category.go | 8 +++++--- pkg/model/dfw/dfw.go | 8 +++++--- pkg/model/dfw/rule.go | 2 ++ pkg/model/parser.go | 2 +- 4 files changed, 13 insertions(+), 7 deletions(-) diff --git a/pkg/model/dfw/category.go b/pkg/model/dfw/category.go index 7ca5445..ce6cb3b 100644 --- a/pkg/model/dfw/category.go +++ b/pkg/model/dfw/category.go @@ -181,12 +181,14 @@ func (c *categorySpec) outboundEffectiveRules() string { return strings.Join(rulesStr, lineSeparatorStr) } -func (c *categorySpec) addRule(src, dst []*endpoints.VM, conn *netset.TransportSet, +func (c *categorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups []*collector.Group, conn *netset.TransportSet, action, direction string, ruleID int, origRule *collector.Rule, scope []*endpoints.VM, secPolicyName string, origDefaultRule *collector.FirewallRule) { newRule := &FwRule{ srcVMs: src, dstVMs: dst, + 
srcGroups: srcGroups, + dstGroups: dstGroups, conn: conn, action: actionFromString(action), direction: direction, @@ -198,8 +200,8 @@ func (c *categorySpec) addRule(src, dst []*endpoints.VM, conn *netset.TransportS secPolicyCategory: c.category.string(), categoryRef: c, dfwRef: c.dfwRef, - symbolicSrc: []*symbolicexpr.SymbolicPath{}, // todo tmp - symbolicDst: []*symbolicexpr.SymbolicPath{}, // todo tmp + symbolicSrc: []*symbolicexpr.SymbolicPath{}, + symbolicDst: []*symbolicexpr.SymbolicPath{}, } c.rules = append(c.rules, newRule) diff --git a/pkg/model/dfw/dfw.go b/pkg/model/dfw/dfw.go index cd4a641..5c7c5a5 100644 --- a/pkg/model/dfw/dfw.go +++ b/pkg/model/dfw/dfw.go @@ -114,11 +114,13 @@ func (d *DFW) AllEffectiveRules() string { // AddRule func for testing purposes -func (d *DFW) AddRule(src, dst []*endpoints.VM, conn *netset.TransportSet, categoryStr, actionStr, direction string, - ruleID int, origRule *collector.Rule, scope []*endpoints.VM, secPolicyName string, origDefaultRule *collector.FirewallRule) { +func (d *DFW) AddRule(src, dst []*endpoints.VM, srcGroups, dstGroups []*collector.Group, conn *netset.TransportSet, + categoryStr, actionStr, direction string, ruleID int, origRule *collector.Rule, scope []*endpoints.VM, + secPolicyName string, origDefaultRule *collector.FirewallRule) { for _, fwCategory := range d.categoriesSpecs { if fwCategory.category.string() == categoryStr { - fwCategory.addRule(src, dst, conn, actionStr, direction, ruleID, origRule, scope, secPolicyName, origDefaultRule) + fwCategory.addRule(src, dst, srcGroups, dstGroups, conn, actionStr, direction, ruleID, origRule, scope, + secPolicyName, origDefaultRule) } } } diff --git a/pkg/model/dfw/rule.go b/pkg/model/dfw/rule.go index afbfba6..a516c53 100644 --- a/pkg/model/dfw/rule.go +++ b/pkg/model/dfw/rule.go @@ -61,6 +61,8 @@ type FwRule struct { srcVMs []*endpoints.VM dstVMs []*endpoints.VM scope []*endpoints.VM + srcGroups []*collector.Group + dstGroups []*collector.Group conn *netset.TransportSet action ruleAction direction string // "IN","OUT", "IN_OUT" diff --git a/pkg/model/parser.go b/pkg/model/parser.go index cd41779..72bba28 100644 --- a/pkg/model/parser.go +++ b/pkg/model/parser.go @@ -148,7 +148,7 @@ func (p *NSXConfigParser) getDFW() { } func (p *NSXConfigParser) addFWRule(r *parsedRule, category string, origRule *collector.Rule) { - p.configRes.fw.AddRule(r.srcVMs, r.dstVMs, r.conn, category, r.action, r.direction, r.ruleID, + p.configRes.fw.AddRule(r.srcVMs, r.dstVMs, r.SrcGroups, r.DstGroup, r.conn, category, r.action, r.direction, r.ruleID, origRule, r.scope, r.secPolicyName, r.defaultRuleObj) } From 6f528f6a21229ac987f37935f35a307e08d4a819 Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 16 Dec 2024 12:43:33 +0200 Subject: [PATCH 03/76] label can be group --- pkg/collector/data_model.go | 4 ++++ pkg/symbolicexpr/atomic.go | 2 ++ 2 files changed, 6 insertions(+) diff --git a/pkg/collector/data_model.go b/pkg/collector/data_model.go index fce1d4c..d545a66 100644 --- a/pkg/collector/data_model.go +++ b/pkg/collector/data_model.go @@ -431,6 +431,10 @@ func (group *Group) UnmarshalJSON(b []byte) error { ) } +func (group *Group) Name() string { + return *group.Group.DisplayName +} + /////////////////////////////////////////////////////////////////////////////////////// type Domain struct { diff --git a/pkg/symbolicexpr/atomic.go b/pkg/symbolicexpr/atomic.go index ef1df17..3c03c55 100644 --- a/pkg/symbolicexpr/atomic.go +++ b/pkg/symbolicexpr/atomic.go @@ -18,6 +18,8 @@ func (term atomicTerm) 
string() string { labelType = "virtual machine " case *collector.Tag: labelType = "tag " + case *collector.Group: + labelType = "group " } return labelType + term.label.Name() + equalSign + term.toVal } From 167bfe9fb009ec6f064bc87d51a722bb923577b9 Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 17 Dec 2024 12:59:52 +0200 Subject: [PATCH 04/76] synthesis skeleton --- pkg/synthesis/synthesis.go | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 pkg/synthesis/synthesis.go diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go new file mode 100644 index 0000000..a812a0e --- /dev/null +++ b/pkg/synthesis/synthesis.go @@ -0,0 +1,19 @@ +package synthesis + +import ( + "github.com/np-guard/vmware-analyzer/pkg/collector" + "github.com/np-guard/vmware-analyzer/pkg/model" +) + +func NSXSynthesis(recourses *collector.ResourcesContainerModel, params model.OutputParameters) (string, error) { + parser := model.NewNSXConfigParserFromResourcesContainer(recourses) + err := parser.RunParser() + if err != nil { + return "", err + } + config := parser.GetConfig() + + _ = config + + return "", nil +} From 24440c2c13d655eca46eda825b03f6e4d6650cf2 Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 17 Dec 2024 14:18:18 +0200 Subject: [PATCH 05/76] typo fix --- pkg/model/parser.go | 1 + 1 file changed, 1 insertion(+) diff --git a/pkg/model/parser.go b/pkg/model/parser.go index 72bba28..1d9c311 100644 --- a/pkg/model/parser.go +++ b/pkg/model/parser.go @@ -216,6 +216,7 @@ func (p *NSXConfigParser) allGroups() ([]*endpoints.VM, []*collector.Group) { } } p.allGroupsVMs = vms + p.groups = groups return vms, groups } From 33039e82ac0090caf8ab5a7c205fd7a1bb74b32a Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 17 Dec 2024 14:20:03 +0200 Subject: [PATCH 06/76] synthesis test --- pkg/synthesis/synthesis_test.go | 42 +++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 pkg/synthesis/synthesis_test.go diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go new file mode 100644 index 0000000..0030599 --- /dev/null +++ b/pkg/synthesis/synthesis_test.go @@ -0,0 +1,42 @@ +package synthesis + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/np-guard/vmware-analyzer/pkg/collector/data" + "github.com/np-guard/vmware-analyzer/pkg/logging" + "github.com/np-guard/vmware-analyzer/pkg/model" +) + +type synthesisTest struct { + name string + exData data.Example +} + +var allTests = []synthesisTest{ + { + name: "ExampleDumb", + exData: data.ExampleDumb, + }, +} + +func (a *synthesisTest) run(t *testing.T) { + params := model.OutputParameters{ + Format: "txt", + } + rc := data.ExamplesGeneration(a.exData) + res, err := NSXSynthesis(rc, params) + require.Nil(t, err) + fmt.Println(res) +} + +func TestSynthesis(t *testing.T) { + logging.Init(logging.HighVerbosity) + for i := range allTests { + test := &allTests[i] + test.run(t) + } +} From 32148168cf332b25dc4ed93a93af09f0784e0578 Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 17 Dec 2024 14:32:20 +0200 Subject: [PATCH 07/76] NewAtomicTerm --- pkg/symbolicexpr/atomic.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkg/symbolicexpr/atomic.go b/pkg/symbolicexpr/atomic.go index 3c03c55..93c79ee 100644 --- a/pkg/symbolicexpr/atomic.go +++ b/pkg/symbolicexpr/atomic.go @@ -24,6 +24,10 @@ func (term atomicTerm) string() string { return labelType + term.label.Name() + equalSign + term.toVal } +func NewAtomicTerm(label vmLabel, toVal string, neg bool) 
*atomicTerm { + return &atomicTerm{label: label, toVal: toVal, neg: neg} +} + // negate an atomicTerm expression; return pointer to corresponding expression from Atomics, if not there yet then add it func (term atomicTerm) negate() atomic { return atomicTerm{label: term.label, toVal: term.toVal, neg: !term.neg} From 439cb1ef9727e15145d5b094cb1e1bd80c4d2a69 Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 17 Dec 2024 14:38:31 +0200 Subject: [PATCH 08/76] renaming --- pkg/symbolicexpr/atomic.go | 10 ++++----- pkg/symbolicexpr/model.go | 16 +++++++------- pkg/symbolicexpr/symbolicexpr_test.go | 30 +++++++++++++-------------- 3 files changed, 28 insertions(+), 28 deletions(-) diff --git a/pkg/symbolicexpr/atomic.go b/pkg/symbolicexpr/atomic.go index 93c79ee..da3b37a 100644 --- a/pkg/symbolicexpr/atomic.go +++ b/pkg/symbolicexpr/atomic.go @@ -11,7 +11,7 @@ func (term atomicTerm) string() string { equalSign = " != " } labelType := "" - switch term.label.(type) { + switch term.property.(type) { case *collector.Segment: labelType = "segment " case *endpoints.VM: @@ -21,16 +21,16 @@ func (term atomicTerm) string() string { case *collector.Group: labelType = "group " } - return labelType + term.label.Name() + equalSign + term.toVal + return labelType + term.property.Name() + equalSign + term.toVal } -func NewAtomicTerm(label vmLabel, toVal string, neg bool) *atomicTerm { - return &atomicTerm{label: label, toVal: toVal, neg: neg} +func NewAtomicTerm(label vmProperty, toVal string, neg bool) *atomicTerm { + return &atomicTerm{property: label, toVal: toVal, neg: neg} } // negate an atomicTerm expression; return pointer to corresponding expression from Atomics, if not there yet then add it func (term atomicTerm) negate() atomic { - return atomicTerm{label: term.label, toVal: term.toVal, neg: !term.neg} + return atomicTerm{property: term.property, toVal: term.toVal, neg: !term.neg} } func (atomicTerm) isTautology() bool { diff --git a/pkg/symbolicexpr/model.go b/pkg/symbolicexpr/model.go index 9627ee6..a87d842 100644 --- a/pkg/symbolicexpr/model.go +++ b/pkg/symbolicexpr/model.go @@ -2,21 +2,21 @@ package symbolicexpr // the package implements a symbolic expression of enabled paths from symbolic src to symbolic dst, expressed as CNF -// Virtual machines' labels used in atomic group expr, e.g. tag = "backend" +// Virtual machines' properties used in atomic group expr, e.g. group = Gryffindor, tag = "backend" // Used by NSX: Tag, Segment, (VM) Name, OS_Name, Computer_Name -// vmLabel implemented by collector.Segment, endpoints.vm, synthesis.Tag +// vmProperty implemented by collector.Segment, endpoints.vm, synthesis.Tag // todo: Support OSName and ComputerName at POC? -type vmLabel interface { +type vmProperty interface { Name() string } // atomicTerm represent a simple condition, atom of defining a group: -// tag/segment/name(/computer_Name/OS_Name?) equal/not equal string -// formally, atomicTerm -> label equal const_string, not atomicTerm +// group/tag/segment/name(/computer_Name/OS_Name?) equal/not equal string +// formally, atomicTerm -> property equal const_string, not atomicTerm type atomicTerm struct { - label vmLabel - toVal string - neg bool + property vmProperty + toVal string + neg bool } // tautology represents a condition that always holds. 
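// ---------------------------------------------------------------------------
// Illustrative sketch (NOT part of this patch series): how the pieces renamed
// above compose — atomic terms over a vmProperty, collected into Conjunctions
// and paired into a SymbolicPath — written in the style of the package's own
// tests (see symbolicexpr_test.go below). testProperty is a stand-in invented
// here; any type with Name() string satisfies vmProperty (a tag, a segment,
// a VM name or, after PATCH 03, a group).
package symbolicexpr

import "fmt"

type testProperty struct{ name string }

func (p testProperty) Name() string { return p.name }

func sketchSymbolicPath() {
	// "t1 = str1" on the src side and its negation "t1 != str1" on the dst side,
	// mirroring TestSymbolicPaths.
	srcTerm := NewAtomicTerm(testProperty{name: "t1"}, "str1", false)
	dstTerm := srcTerm.negate()

	path := SymbolicPath{Src: Conjunction{srcTerm}, Dst: Conjunction{dstTerm}}
	fmt.Println(path.Src.string(), "->", path.Dst.string())
}
// ---------------------------------------------------------------------------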
diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index b4be674..08c64e0 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -23,7 +23,7 @@ func TestSymbolicPaths(t *testing.T) { conjSrc, conjDst, conjEmpty := Conjunction{}, Conjunction{}, Conjunction{} for i := 1; i <= 3; i++ { testTag := initTestTag(fmt.Sprintf("t%v", i)) - atomic := &atomicTerm{label: testTag, toVal: fmt.Sprintf("str%v", i)} + atomic := &atomicTerm{property: testTag, toVal: fmt.Sprintf("str%v", i)} conjSrc = *conjSrc.add(atomic) negateAtomic := atomic.negate().(atomicTerm) conjDst = *conjDst.add(&negateAtomic) @@ -48,16 +48,16 @@ func TestSymbolicPaths(t *testing.T) { func TestComputeAllowGivenDenySingleTermEach(t *testing.T) { conjSrc1, conjDst1, conjSrc2, conjDst2 := Conjunction{}, Conjunction{}, Conjunction{}, Conjunction{} testSrc1 := initTestTag("s1") - atomic1 := &atomicTerm{label: testSrc1, toVal: "str1"} + atomic1 := &atomicTerm{property: testSrc1, toVal: "str1"} conjSrc1 = *conjSrc1.add(atomic1) testDst1 := initTestTag("d1") - atomicDst1 := &atomicTerm{label: testDst1, toVal: "str1"} + atomicDst1 := &atomicTerm{property: testDst1, toVal: "str1"} conjDst1 = *conjDst1.add(atomicDst1) testSrc2 := initTestTag("s2") - atomic2 := &atomicTerm{label: testSrc2, toVal: "str2"} + atomic2 := &atomicTerm{property: testSrc2, toVal: "str2"} conjSrc2 = *conjSrc2.add(atomic2) testDst2 := initTestTag("d2") - atomicDst2 := &atomicTerm{label: testDst2, toVal: "str2"} + atomicDst2 := &atomicTerm{property: testDst2, toVal: "str2"} conjDst2 = *conjDst2.add(atomicDst2) allowPath := SymbolicPath{conjSrc1, conjDst1} denyPath := SymbolicPath{conjSrc2, conjDst2} @@ -84,10 +84,10 @@ func TestComputeAllowGivenDenyThreeTermsEach(t *testing.T) { conjAllow, conjDeny := Conjunction{}, Conjunction{} for i := 1; i <= 3; i++ { testAllow := initTestTag(fmt.Sprintf("s%v", i)) - atomicAllow := &atomicTerm{label: testAllow, toVal: fmt.Sprintf("str%v", i)} + atomicAllow := &atomicTerm{property: testAllow, toVal: fmt.Sprintf("str%v", i)} conjAllow = *conjAllow.add(atomicAllow) testDeny := initTestTag(fmt.Sprintf("s%v`", i)) - atomicDeny := &atomicTerm{label: testDeny, toVal: fmt.Sprintf("str%v`", i)} + atomicDeny := &atomicTerm{property: testDeny, toVal: fmt.Sprintf("str%v`", i)} conjDeny = *conjDeny.add(atomicDeny) } allowPath := SymbolicPath{conjAllow, conjAllow} @@ -121,7 +121,7 @@ func TestComputeAllowGivenDenyAllowTautology(t *testing.T) { conjDeny := Conjunction{} for i := 1; i <= 3; i++ { testDeny := initTestTag(fmt.Sprintf("s%v`", i)) - atomicDeny := &atomicTerm{label: testDeny, toVal: fmt.Sprintf("str%v`", i)} + atomicDeny := &atomicTerm{property: testDeny, toVal: fmt.Sprintf("str%v`", i)} conjDeny = *conjDeny.add(atomicDeny) } tautologyConj := Conjunction{tautology{}} @@ -146,7 +146,7 @@ func TestComputeAllowGivenDenyDenyTautology(t *testing.T) { conjAllow := Conjunction{} for i := 1; i <= 3; i++ { testAllow := initTestTag(fmt.Sprintf("s%v`", i)) - atomicAllow := &atomicTerm{label: testAllow, toVal: fmt.Sprintf("str%v`", i)} + atomicAllow := &atomicTerm{property: testAllow, toVal: fmt.Sprintf("str%v`", i)} conjAllow = *conjAllow.add(atomicAllow) } fmt.Printf("conjAllow is %v\nisEmptySet%v\n\n", conjAllow.string(), conjAllow.isEmptySet()) @@ -191,14 +191,14 @@ func TestComputeAllowGivenDenies(t *testing.T) { testSegment := initTestTag("segment") for i := 0; i < 3; i++ { if i < 2 { - atomicAllowSrc := &atomicTerm{label: testTag, toVal: fmt.Sprintf("t%v", 2*i)} 
- atomicAllowDst := &atomicTerm{label: testTag, toVal: fmt.Sprintf("t%v", 2*i+1)} + atomicAllowSrc := &atomicTerm{property: testTag, toVal: fmt.Sprintf("t%v", 2*i)} + atomicAllowDst := &atomicTerm{property: testTag, toVal: fmt.Sprintf("t%v", 2*i+1)} conjAllowSrc := Conjunction{atomicAllowSrc} conjAllowDst := Conjunction{atomicAllowDst} allowPaths = append(allowPaths, &SymbolicPath{conjAllowSrc, conjAllowDst}) } - atomicDenySrc := &atomicTerm{label: testSegment, toVal: fmt.Sprintf("s%v", 2*i)} - atomicDenyDst := &atomicTerm{label: testSegment, toVal: fmt.Sprintf("s%v", 2*i+1)} + atomicDenySrc := &atomicTerm{property: testSegment, toVal: fmt.Sprintf("s%v", 2*i)} + atomicDenyDst := &atomicTerm{property: testSegment, toVal: fmt.Sprintf("s%v", 2*i+1)} conjDenySrc := Conjunction{atomicDenySrc} conjDenyDst := Conjunction{atomicDenyDst} denyPaths = append(denyPaths, &SymbolicPath{conjDenySrc, conjDenyDst}) @@ -235,10 +235,10 @@ func TestComputeAllowGivenDenies(t *testing.T) { func TestAllowDenyOptimizeEmptyPath(t *testing.T) { conjSrc1, conjDst1 := Conjunction{}, Conjunction{} testSrc1 := initTestTag("s1") - atomic1 := &atomicTerm{label: testSrc1, toVal: "str1"} + atomic1 := &atomicTerm{property: testSrc1, toVal: "str1"} conjSrc1 = *conjSrc1.add(atomic1) testDst1 := initTestTag("d1") - atomicDst1 := &atomicTerm{label: testDst1, toVal: "str1"} + atomicDst1 := &atomicTerm{property: testDst1, toVal: "str1"} conjDst1 = *conjDst1.add(atomicDst1) allowPath := SymbolicPath{conjSrc1, Conjunction{tautology{}}} denyPath := SymbolicPath{conjSrc1, conjDst1} From f79b9ec19697f01c3fdb012c881ddf8825901222 Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 17 Dec 2024 14:53:35 +0200 Subject: [PATCH 09/76] typo fix --- pkg/model/dfw/rule.go | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/pkg/model/dfw/rule.go b/pkg/model/dfw/rule.go index a516c53..71063af 100644 --- a/pkg/model/dfw/rule.go +++ b/pkg/model/dfw/rule.go @@ -74,10 +74,7 @@ type FwRule struct { categoryRef *categorySpec dfwRef *DFW // clause of symbolic src abd symbolic dst - // todo: in order to compute these will have to maintain and use the (not yet exported) synthesis.AbstractModelSyn.atomics - // keep it there? - symbolicSrc []*symbolicexpr.SymbolicPath - symbolicDst []*symbolicexpr.SymbolicPath + symbolicPaths []*symbolicexpr.SymbolicPath // srcRuleObj ... 
todo: add a reference to the original rule retrieved from api } @@ -124,8 +121,7 @@ func (f *FwRule) getInboundRule() *FwRule { origRuleObj: f.origRuleObj, ruleID: f.ruleID, secPolicyName: f.secPolicyName, - symbolicSrc: []*symbolicexpr.SymbolicPath{}, // todo tmp - symbolicDst: []*symbolicexpr.SymbolicPath{}, // todo tmp + symbolicPaths: []*symbolicexpr.SymbolicPath{}, } } @@ -160,8 +156,7 @@ func (f *FwRule) getOutboundRule() *FwRule { origRuleObj: f.origRuleObj, ruleID: f.ruleID, secPolicyName: f.secPolicyName, - symbolicSrc: []*symbolicexpr.SymbolicPath{}, // todo tmp - symbolicDst: []*symbolicexpr.SymbolicPath{}, // todo tmp + symbolicPaths: []*symbolicexpr.SymbolicPath{}, } } @@ -194,7 +189,7 @@ func vmsString(vms []*endpoints.VM) string { // return a string representation of a single rule // groups are interpreted to VM members in this representation func (f *FwRule) string() string { - _, _ = f.symbolicSrc, f.symbolicDst // todo tmp for line + _ = f.symbolicPaths return fmt.Sprintf("ruleID: %d, src: %s, dst: %s, conn: %s, action: %s, direction: %s, scope: %s, sec-policy: %s", f.ruleID, vmsString(f.srcVMs), vmsString(f.dstVMs), f.conn.String(), string(f.action), f.direction, vmsString(f.scope), f.secPolicyName) } @@ -312,3 +307,8 @@ func (f *FwRule) originalRuleStr() string { common.Reset, ) } + +// ComputeSymbolic computes symbolicSrc and symbolicDst +func (f *FwRule) ComputeSymbolic() { + +} From 44ef5718ddb85de2bce66ef335b038b5206424a8 Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 18 Dec 2024 08:36:09 +0200 Subject: [PATCH 10/76] typo fix --- pkg/model/parser.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pkg/model/parser.go b/pkg/model/parser.go index 1d9c311..2ad10d3 100644 --- a/pkg/model/parser.go +++ b/pkg/model/parser.go @@ -148,7 +148,7 @@ func (p *NSXConfigParser) getDFW() { } func (p *NSXConfigParser) addFWRule(r *parsedRule, category string, origRule *collector.Rule) { - p.configRes.fw.AddRule(r.srcVMs, r.dstVMs, r.SrcGroups, r.DstGroup, r.conn, category, r.action, r.direction, r.ruleID, + p.configRes.fw.AddRule(r.srcVMs, r.dstVMs, r.SrcGroups, r.DstGroups, r.conn, category, r.action, r.direction, r.ruleID, origRule, r.scope, r.secPolicyName, r.defaultRuleObj) } @@ -164,7 +164,7 @@ func (p *NSXConfigParser) getDefaultRule(secPolicy *collector.SecurityPolicy) *p res.srcVMs = vms res.dstVMs = vms res.SrcGroups = groups - res.DstGroup = groups + res.DstGroups = groups switch string(*secPolicy.ConnectivityPreference) { case string(nsx.SecurityPolicyConnectivityPreferenceALLOWLIST), @@ -189,9 +189,9 @@ type parsedRule struct { srcVMs []*endpoints.VM dstVMs []*endpoints.VM // todo: In this stage we are not analyzing the complete expr, yet. 
In this stage we will only handle src and dst - // defined by groups, thus the following SrcGroups and DstGroup + // defined by groups, thus the following SrcGroups and DstGroups SrcGroups []*collector.Group - DstGroup []*collector.Group + DstGroups []*collector.Group action string conn *netset.TransportSet direction string @@ -254,7 +254,7 @@ func (p *NSXConfigParser) getDFWRule(rule *collector.Rule) *parsedRule { // srcExclude := rule.SourcesExcluded res.srcVMs, res.SrcGroups = p.getEndpointsFromGroupsPaths(srcGroups) dstGroups := rule.DestinationGroups - res.dstVMs, res.DstGroup = p.getEndpointsFromGroupsPaths(dstGroups) + res.dstVMs, res.DstGroups = p.getEndpointsFromGroupsPaths(dstGroups) res.action = string(*rule.Action) res.conn = p.getRuleConnections(rule) From ac06b528ca851ca72fb74b294bca52b2b3840140 Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 18 Dec 2024 09:45:59 +0200 Subject: [PATCH 11/76] 1. Gather the relevant group info into parsedRule 2. symbolicPaths will be computed an d used only for synthesis; no need to have it in FWRule --- pkg/model/dfw/category.go | 7 ++----- pkg/model/dfw/rule.go | 13 ++++--------- pkg/model/parser.go | 26 +++++++++++++++++--------- 3 files changed, 23 insertions(+), 23 deletions(-) diff --git a/pkg/model/dfw/category.go b/pkg/model/dfw/category.go index ce6cb3b..c54d3ad 100644 --- a/pkg/model/dfw/category.go +++ b/pkg/model/dfw/category.go @@ -8,7 +8,6 @@ import ( "github.com/np-guard/vmware-analyzer/pkg/collector" "github.com/np-guard/vmware-analyzer/pkg/logging" "github.com/np-guard/vmware-analyzer/pkg/model/endpoints" - "github.com/np-guard/vmware-analyzer/pkg/symbolicexpr" ) // https://dp-downloads.broadcom.com/api-content/apis/API_NTDCRA_001/4.2/html/api_includes/types_SecurityPolicy.html @@ -187,8 +186,8 @@ func (c *categorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups [] newRule := &FwRule{ srcVMs: src, dstVMs: dst, - srcGroups: srcGroups, - dstGroups: dstGroups, + SrcGroups: srcGroups, + DstGroups: dstGroups, conn: conn, action: actionFromString(action), direction: direction, @@ -200,8 +199,6 @@ func (c *categorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups [] secPolicyCategory: c.category.string(), categoryRef: c, dfwRef: c.dfwRef, - symbolicSrc: []*symbolicexpr.SymbolicPath{}, - symbolicDst: []*symbolicexpr.SymbolicPath{}, } c.rules = append(c.rules, newRule) diff --git a/pkg/model/dfw/rule.go b/pkg/model/dfw/rule.go index 71063af..2c00c86 100644 --- a/pkg/model/dfw/rule.go +++ b/pkg/model/dfw/rule.go @@ -10,8 +10,6 @@ import ( "github.com/np-guard/vmware-analyzer/pkg/common" "github.com/np-guard/vmware-analyzer/pkg/logging" "github.com/np-guard/vmware-analyzer/pkg/model/endpoints" - "github.com/np-guard/vmware-analyzer/pkg/symbolicexpr" - nsx "github.com/np-guard/vmware-analyzer/pkg/model/generated" ) @@ -61,8 +59,10 @@ type FwRule struct { srcVMs []*endpoints.VM dstVMs []*endpoints.VM scope []*endpoints.VM - srcGroups []*collector.Group - dstGroups []*collector.Group + SrcGroups []*collector.Group + IsAllSrcGroups bool + DstGroups []*collector.Group + IsAllDstGroups bool conn *netset.TransportSet action ruleAction direction string // "IN","OUT", "IN_OUT" @@ -73,8 +73,6 @@ type FwRule struct { secPolicyCategory string categoryRef *categorySpec dfwRef *DFW - // clause of symbolic src abd symbolic dst - symbolicPaths []*symbolicexpr.SymbolicPath // srcRuleObj ... 
todo: add a reference to the original rule retrieved from api } @@ -121,7 +119,6 @@ func (f *FwRule) getInboundRule() *FwRule { origRuleObj: f.origRuleObj, ruleID: f.ruleID, secPolicyName: f.secPolicyName, - symbolicPaths: []*symbolicexpr.SymbolicPath{}, } } @@ -156,7 +153,6 @@ func (f *FwRule) getOutboundRule() *FwRule { origRuleObj: f.origRuleObj, ruleID: f.ruleID, secPolicyName: f.secPolicyName, - symbolicPaths: []*symbolicexpr.SymbolicPath{}, } } @@ -189,7 +185,6 @@ func vmsString(vms []*endpoints.VM) string { // return a string representation of a single rule // groups are interpreted to VM members in this representation func (f *FwRule) string() string { - _ = f.symbolicPaths return fmt.Sprintf("ruleID: %d, src: %s, dst: %s, conn: %s, action: %s, direction: %s, scope: %s, sec-policy: %s", f.ruleID, vmsString(f.srcVMs), vmsString(f.dstVMs), f.conn.String(), string(f.action), f.direction, vmsString(f.scope), f.secPolicyName) } diff --git a/pkg/model/parser.go b/pkg/model/parser.go index 2ad10d3..1fd1e23 100644 --- a/pkg/model/parser.go +++ b/pkg/model/parser.go @@ -121,7 +121,7 @@ func (p *NSXConfigParser) getDFW() { r.scope = scope // scope from policy if !policyHasScope { // if policy scope is not configured, rule's scope takes effect - r.scope, _ = p.getEndpointsFromGroupsPaths(rule.Scope) + r.scope, r.scopeGroups = p.getEndpointsFromGroupsPaths(rule.Scope) } r.secPolicyName = *secPolicy.DisplayName p.addFWRule(r, category, rule) @@ -148,7 +148,7 @@ func (p *NSXConfigParser) getDFW() { } func (p *NSXConfigParser) addFWRule(r *parsedRule, category string, origRule *collector.Rule) { - p.configRes.fw.AddRule(r.srcVMs, r.dstVMs, r.SrcGroups, r.DstGroups, r.conn, category, r.action, r.direction, r.ruleID, + p.configRes.fw.AddRule(r.srcVMs, r.dstVMs, r.srcGroups, r.dstGroups, r.conn, category, r.action, r.direction, r.ruleID, origRule, r.scope, r.secPolicyName, r.defaultRuleObj) } @@ -163,8 +163,10 @@ func (p *NSXConfigParser) getDefaultRule(secPolicy *collector.SecurityPolicy) *p // rule applied as any-to-any only for ths VMs in the scope of the SecurityPolicy res.srcVMs = vms res.dstVMs = vms - res.SrcGroups = groups - res.DstGroups = groups + res.srcGroups = groups + res.isAllSrcGroups = true + res.dstGroups = groups + res.isAllSrcGroups = true switch string(*secPolicy.ConnectivityPreference) { case string(nsx.SecurityPolicyConnectivityPreferenceALLOWLIST), @@ -189,14 +191,18 @@ type parsedRule struct { srcVMs []*endpoints.VM dstVMs []*endpoints.VM // todo: In this stage we are not analyzing the complete expr, yet. In this stage we will only handle src and dst - // defined by groups, thus the following SrcGroups and DstGroups - SrcGroups []*collector.Group - DstGroups []*collector.Group + // defined by groups, thus the following temp 4 fields + srcGroups []*collector.Group + isAllSrcGroups bool + dstGroups []*collector.Group + isAllDstGroups bool action string conn *netset.TransportSet direction string ruleID int scope []*endpoints.VM + // todo: scopeGroups tmp same as srcGroups and fields above + scopeGroups []*collector.Group secPolicyName string defaultRuleObj *collector.FirewallRule } @@ -252,9 +258,11 @@ func (p *NSXConfigParser) getDFWRule(rule *collector.Rule) *parsedRule { // the source groups. 
If false, the rule applies to the source groups // TODO: handle excluded fields // srcExclude := rule.SourcesExcluded - res.srcVMs, res.SrcGroups = p.getEndpointsFromGroupsPaths(srcGroups) + res.srcVMs, res.srcGroups = p.getEndpointsFromGroupsPaths(srcGroups) + res.isAllSrcGroups = slices.Contains(srcGroups, anyStr) dstGroups := rule.DestinationGroups - res.dstVMs, res.DstGroups = p.getEndpointsFromGroupsPaths(dstGroups) + res.dstVMs, res.dstGroups = p.getEndpointsFromGroupsPaths(dstGroups) + res.isAllDstGroups = slices.Contains(dstGroups, anyStr) res.action = string(*rule.Action) res.conn = p.getRuleConnections(rule) From 5f06a8ca3fa8a915e0bf22c5c62e82ca6dd2b11a Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 18 Dec 2024 10:38:32 +0200 Subject: [PATCH 12/76] added relevant group info to FWRule --- pkg/model/dfw/category.go | 9 ++++++--- pkg/model/dfw/dfw.go | 10 +++++----- pkg/model/dfw/rule.go | 9 ++++++--- pkg/model/parser.go | 4 ++-- 4 files changed, 19 insertions(+), 13 deletions(-) diff --git a/pkg/model/dfw/category.go b/pkg/model/dfw/category.go index c54d3ad..9794e74 100644 --- a/pkg/model/dfw/category.go +++ b/pkg/model/dfw/category.go @@ -180,14 +180,16 @@ func (c *categorySpec) outboundEffectiveRules() string { return strings.Join(rulesStr, lineSeparatorStr) } -func (c *categorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups []*collector.Group, conn *netset.TransportSet, - action, direction string, ruleID int, origRule *collector.Rule, scope []*endpoints.VM, - secPolicyName string, origDefaultRule *collector.FirewallRule) { +func (c *categorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups, scopeGroups []*collector.Group, + isAllSrcGroup, isAllDstGroup bool, conn *netset.TransportSet, action, direction string, ruleID int, + origRule *collector.Rule, scope []*endpoints.VM, secPolicyName string, origDefaultRule *collector.FirewallRule) { newRule := &FwRule{ srcVMs: src, dstVMs: dst, SrcGroups: srcGroups, + IsAllSrcGroups: isAllSrcGroup, DstGroups: dstGroups, + IsAllDstGroups: isAllDstGroup, conn: conn, action: actionFromString(action), direction: direction, @@ -195,6 +197,7 @@ func (c *categorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups [] origRuleObj: origRule, origDefaultRuleObj: origDefaultRule, scope: scope, + ScopeGroups: scopeGroups, secPolicyName: secPolicyName, secPolicyCategory: c.category.string(), categoryRef: c, diff --git a/pkg/model/dfw/dfw.go b/pkg/model/dfw/dfw.go index 5c7c5a5..caacd17 100644 --- a/pkg/model/dfw/dfw.go +++ b/pkg/model/dfw/dfw.go @@ -114,13 +114,13 @@ func (d *DFW) AllEffectiveRules() string { // AddRule func for testing purposes -func (d *DFW) AddRule(src, dst []*endpoints.VM, srcGroups, dstGroups []*collector.Group, conn *netset.TransportSet, - categoryStr, actionStr, direction string, ruleID int, origRule *collector.Rule, scope []*endpoints.VM, - secPolicyName string, origDefaultRule *collector.FirewallRule) { +func (d *DFW) AddRule(src, dst []*endpoints.VM, srcGroups, dstGroups, scopeGroups []*collector.Group, + isAllSrcGroups, isAllDstGroups bool, conn *netset.TransportSet, categoryStr, actionStr, direction string, + ruleID int, origRule *collector.Rule, scope []*endpoints.VM, secPolicyName string, origDefaultRule *collector.FirewallRule) { for _, fwCategory := range d.categoriesSpecs { if fwCategory.category.string() == categoryStr { - fwCategory.addRule(src, dst, srcGroups, dstGroups, conn, actionStr, direction, ruleID, origRule, scope, - secPolicyName, origDefaultRule) + fwCategory.addRule(src, 
dst, srcGroups, dstGroups, scopeGroups, isAllSrcGroups, isAllDstGroups, conn, + actionStr, direction, ruleID, origRule, scope, secPolicyName, origDefaultRule) } } } diff --git a/pkg/model/dfw/rule.go b/pkg/model/dfw/rule.go index 2c00c86..276d93e 100644 --- a/pkg/model/dfw/rule.go +++ b/pkg/model/dfw/rule.go @@ -56,13 +56,16 @@ func actionFromString(s string) ruleAction { } type FwRule struct { - srcVMs []*endpoints.VM - dstVMs []*endpoints.VM - scope []*endpoints.VM + srcVMs []*endpoints.VM + dstVMs []*endpoints.VM + scope []*endpoints.VM + // todo: the following 5 fields are needed for the symbolic expr in synthesis, and are temp until we handle the + // entire expr properly SrcGroups []*collector.Group IsAllSrcGroups bool DstGroups []*collector.Group IsAllDstGroups bool + ScopeGroups []*collector.Group conn *netset.TransportSet action ruleAction direction string // "IN","OUT", "IN_OUT" diff --git a/pkg/model/parser.go b/pkg/model/parser.go index 1fd1e23..a2d5636 100644 --- a/pkg/model/parser.go +++ b/pkg/model/parser.go @@ -148,8 +148,8 @@ func (p *NSXConfigParser) getDFW() { } func (p *NSXConfigParser) addFWRule(r *parsedRule, category string, origRule *collector.Rule) { - p.configRes.fw.AddRule(r.srcVMs, r.dstVMs, r.srcGroups, r.dstGroups, r.conn, category, r.action, r.direction, r.ruleID, - origRule, r.scope, r.secPolicyName, r.defaultRuleObj) + p.configRes.fw.AddRule(r.srcVMs, r.dstVMs, r.srcGroups, r.dstGroups, r.scopeGroups, r.isAllSrcGroups, r.isAllDstGroups, + r.conn, category, r.action, r.direction, r.ruleID, origRule, r.scope, r.secPolicyName, r.defaultRuleObj) } func (p *NSXConfigParser) getDefaultRule(secPolicy *collector.SecurityPolicy) *parsedRule { From ab29c8ea198b8cd979e59341df81a44bb8ae379b Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 18 Dec 2024 12:19:00 +0200 Subject: [PATCH 13/76] Scope added to FWRule --- pkg/model/dfw/rule.go | 51 ++++++++++++++++++++++++++----------------- 1 file changed, 31 insertions(+), 20 deletions(-) diff --git a/pkg/model/dfw/rule.go b/pkg/model/dfw/rule.go index 276d93e..180302d 100644 --- a/pkg/model/dfw/rule.go +++ b/pkg/model/dfw/rule.go @@ -61,10 +61,11 @@ type FwRule struct { scope []*endpoints.VM // todo: the following 5 fields are needed for the symbolic expr in synthesis, and are temp until we handle the // entire expr properly - SrcGroups []*collector.Group - IsAllSrcGroups bool - DstGroups []*collector.Group - IsAllDstGroups bool + SrcGroups []*collector.Group + IsAllSrcGroups bool + DstGroups []*collector.Group + IsAllDstGroups bool + // Scope implies additional condition on any Src and any Dst; will be added in one of the last stages ScopeGroups []*collector.Group conn *netset.TransportSet action ruleAction @@ -114,14 +115,19 @@ func (f *FwRule) getInboundRule() *FwRule { return nil } return &FwRule{ - srcVMs: f.srcVMs, - dstVMs: newDest, - conn: f.conn, - action: f.action, - direction: string(nsx.RuleDirectionIN), - origRuleObj: f.origRuleObj, - ruleID: f.ruleID, - secPolicyName: f.secPolicyName, + srcVMs: f.srcVMs, + dstVMs: newDest, + SrcGroups: f.SrcGroups, + DstGroups: f.DstGroups, + IsAllSrcGroups: f.IsAllSrcGroups, + IsAllDstGroups: f.IsAllDstGroups, + ScopeGroups: f.ScopeGroups, + conn: f.conn, + action: f.action, + direction: string(nsx.RuleDirectionIN), + origRuleObj: f.origRuleObj, + ruleID: f.ruleID, + secPolicyName: f.secPolicyName, } } @@ -148,14 +154,19 @@ func (f *FwRule) getOutboundRule() *FwRule { return nil } return &FwRule{ - srcVMs: newSrc, - dstVMs: f.dstVMs, - conn: f.conn, - action: f.action, - 
direction: string(nsx.RuleDirectionOUT), - origRuleObj: f.origRuleObj, - ruleID: f.ruleID, - secPolicyName: f.secPolicyName, + srcVMs: newSrc, + dstVMs: f.dstVMs, + SrcGroups: f.SrcGroups, + DstGroups: f.DstGroups, + IsAllSrcGroups: f.IsAllSrcGroups, + IsAllDstGroups: f.IsAllDstGroups, + ScopeGroups: f.ScopeGroups, + conn: f.conn, + action: f.action, + direction: string(nsx.RuleDirectionOUT), + origRuleObj: f.origRuleObj, + ruleID: f.ruleID, + secPolicyName: f.secPolicyName, } } From 57080c799759ee5bc649bce337ef206c5b26954f Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 18 Dec 2024 13:00:32 +0200 Subject: [PATCH 14/76] Added test printing and exported fields to be used by synthesis --- pkg/model/config.go | 12 +++---- pkg/model/connectivity_test.go | 4 +-- pkg/model/dfw/category.go | 58 +++++++++++++++++----------------- pkg/model/dfw/dfw.go | 44 +++++++++++++------------- pkg/model/dfw/rule.go | 30 +++++++++--------- pkg/model/parser.go | 8 ++--- pkg/synthesis/synthesis.go | 33 +++++++++++++++++-- 7 files changed, 109 insertions(+), 80 deletions(-) diff --git a/pkg/model/config.go b/pkg/model/config.go index 32380cf..fbf6c69 100644 --- a/pkg/model/config.go +++ b/pkg/model/config.go @@ -13,7 +13,7 @@ import ( type config struct { vms []*endpoints.VM // list of all vms vmsMap map[string]*endpoints.VM // map from uid to vm objects - fw *dfw.DFW // currently assuming one DFW only (todo: rename pkg dfw) + Fw *dfw.DFW // currently assuming one DFW only (todo: rename pkg dfw) analyzedConnectivity connMap // the resulting connectivity map from analyzing this configuration analysisDone bool } @@ -29,14 +29,14 @@ func (c *config) ComputeConnectivity(vmsFilter []string) { logging.Debugf("compute connectivity on parsed config") res := connMap{} // make sure all vm pairs are in the result, by init with global default - res.initPairs(c.fw.GlobalDefaultAllow(), c.vms, vmsFilter) + res.initPairs(c.Fw.GlobalDefaultAllow(), c.vms, vmsFilter) // iterate over all vm pairs in the initialized map at res, get the analysis result per pair for src, srcMap := range res { for dst := range srcMap { if src == dst { continue } - conn := c.fw.AllowedConnections(src, dst) + conn := c.Fw.AllowedConnections(src, dst) res.add(src, dst, conn) } } @@ -55,11 +55,11 @@ func (c *config) getConfigInfoStr() string { sb.WriteString(common.OutputSectionSep) sb.WriteString("DFW:\n") - sb.WriteString(c.fw.OriginalRulesStrFormatted()) + sb.WriteString(c.Fw.OriginalRulesStrFormatted()) sb.WriteString(common.ShortSep) - sb.WriteString(c.fw.String()) + sb.WriteString(c.Fw.String()) sb.WriteString(common.ShortSep) - sb.WriteString(c.fw.AllEffectiveRules()) + sb.WriteString(c.Fw.AllEffectiveRules()) sb.WriteString(common.OutputSectionSep) return sb.String() diff --git a/pkg/model/connectivity_test.go b/pkg/model/connectivity_test.go index 3b1808b..a6aad97 100644 --- a/pkg/model/connectivity_test.go +++ b/pkg/model/connectivity_test.go @@ -26,12 +26,12 @@ var dfwAllowAllByDefault = dfw.NewEmptyDFW(true) // no rules and global def // basic test var config1 = &config{ vms: allVms, - fw: dfwAllowNothingByDefault, + Fw: dfwAllowNothingByDefault, } var config2 = &config{ vms: allVms, - fw: dfwAllowAllByDefault, + Fw: dfwAllowAllByDefault, } func sumPairs(c connMap) int { diff --git a/pkg/model/dfw/category.go b/pkg/model/dfw/category.go index 9794e74..b538a81 100644 --- a/pkg/model/dfw/category.go +++ b/pkg/model/dfw/category.go @@ -74,30 +74,30 @@ var categoriesList = []DfwCategory{ ethernetCategory, emergencyCategory, 
infrastructureCategory, envCategory, appCategoty, emptyCategory, } -// effectiveRules are built from original rules, split to separate inbound & outbound rules +// EffectiveRules are built from original rules, split to separate Inbound & Outbound rules // consider already the scope from the original rules -type effectiveRules struct { - inbound []*FwRule - outbound []*FwRule +type EffectiveRules struct { + Inbound []*FwRule + Outbound []*FwRule } -func (e *effectiveRules) addInboundRule(r *FwRule) { +func (e *EffectiveRules) addInboundRule(r *FwRule) { if r != nil { - e.inbound = append(e.inbound, r) + e.Inbound = append(e.Inbound, r) } } -func (e *effectiveRules) addOutboundRule(r *FwRule) { +func (e *EffectiveRules) addOutboundRule(r *FwRule) { if r != nil { - e.outbound = append(e.outbound, r) + e.Outbound = append(e.Outbound, r) } } type categorySpec struct { - category DfwCategory + Category DfwCategory rules []*FwRule // ordered list of rules - defaultAction ruleAction - processedRules *effectiveRules // ordered list of effective rules + defaultAction RuleAction + ProcessedRules *EffectiveRules // ordered list of effective rules dfwRef *DFW } @@ -112,13 +112,13 @@ type categorySpec struct { func (c *categorySpec) analyzeCategory(src, dst *endpoints.VM, isIngress bool, ) (allowedConns, jumpToAppConns, deniedConns, nonDet *netset.TransportSet) { allowedConns, jumpToAppConns, deniedConns = netset.NoTransports(), netset.NoTransports(), netset.NoTransports() - rules := c.processedRules.inbound // inbound effective rules + rules := c.ProcessedRules.Inbound // inbound effective rules if !isIngress { - rules = c.processedRules.outbound // outbound effective rules + rules = c.ProcessedRules.Outbound // outbound effective rules } for _, rule := range rules /*c.rules*/ { if rule.processedRuleCapturesPair(src, dst) /*rule.capturesPair(src, dst, isIngress)*/ { - switch rule.action { + switch rule.Action { case actionAllow: addedAllowedConns := rule.conn.Subtract(deniedConns).Subtract(jumpToAppConns) allowedConns = allowedConns.Union(addedAllowedConns) @@ -160,22 +160,22 @@ func (c *categorySpec) string() string { for i := range c.rules { rulesStr[i+1] = c.rules[i].string() } - return fmt.Sprintf("category: %s\n%s\ndefault action: %s", c.category.string(), + return fmt.Sprintf("category: %s\n%s\ndefault action: %s", c.Category.string(), strings.Join(rulesStr, lineSeparatorStr), string(c.defaultAction)) } func (c *categorySpec) inboundEffectiveRules() string { - rulesStr := make([]string, len(c.processedRules.inbound)) - for i := range c.processedRules.inbound { - rulesStr[i] = c.processedRules.inbound[i].effectiveRuleStr() + rulesStr := make([]string, len(c.ProcessedRules.Inbound)) + for i := range c.ProcessedRules.Inbound { + rulesStr[i] = c.ProcessedRules.Inbound[i].effectiveRuleStr() } return strings.Join(rulesStr, lineSeparatorStr) } func (c *categorySpec) outboundEffectiveRules() string { - rulesStr := make([]string, len(c.processedRules.outbound)) - for i := range c.processedRules.outbound { - rulesStr[i] = c.processedRules.outbound[i].effectiveRuleStr() + rulesStr := make([]string, len(c.ProcessedRules.Outbound)) + for i := range c.ProcessedRules.Outbound { + rulesStr[i] = c.ProcessedRules.Outbound[i].effectiveRuleStr() } return strings.Join(rulesStr, lineSeparatorStr) } @@ -191,7 +191,7 @@ func (c *categorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups, s DstGroups: dstGroups, IsAllDstGroups: isAllDstGroup, conn: conn, - action: actionFromString(action), + Action: 
actionFromString(action), direction: direction, ruleID: ruleID, origRuleObj: origRule, @@ -199,26 +199,26 @@ func (c *categorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups, s scope: scope, ScopeGroups: scopeGroups, secPolicyName: secPolicyName, - secPolicyCategory: c.category.string(), + secPolicyCategory: c.Category.string(), categoryRef: c, dfwRef: c.dfwRef, } c.rules = append(c.rules, newRule) inbound, outbound := newRule.effectiveRules() - if c.category != ethernetCategory { - c.processedRules.addInboundRule(inbound) - c.processedRules.addOutboundRule(outbound) + if c.Category != ethernetCategory { + c.ProcessedRules.addInboundRule(inbound) + c.ProcessedRules.addOutboundRule(outbound) } else { - logging.Debugf("rule %d in ethernet category is ignored and not added to list of effective rules", ruleID) + logging.Debugf("rule %d in ethernet Category is ignored and not added to list of effective rules", ruleID) } } func newEmptyCategory(c DfwCategory, d *DFW) *categorySpec { return &categorySpec{ - category: c, + Category: c, dfwRef: d, defaultAction: actionNone, - processedRules: &effectiveRules{}, + ProcessedRules: &EffectiveRules{}, } } diff --git a/pkg/model/dfw/dfw.go b/pkg/model/dfw/dfw.go index caacd17..9180b9e 100644 --- a/pkg/model/dfw/dfw.go +++ b/pkg/model/dfw/dfw.go @@ -13,8 +13,8 @@ import ( ) type DFW struct { - categoriesSpecs []*categorySpec // ordered list of categories - defaultAction ruleAction // global default (?) + CategoriesSpecs []*categorySpec // ordered list of categories + defaultAction RuleAction // global default (?) pathsToDisplayNames map[string]string // map from printing paths references as display names instead } @@ -36,8 +36,8 @@ func (d *DFW) AllowedConnectionsIngressOrEgress(src, dst *endpoints.VM, isIngres allDeniedConns := netset.NoTransports() allNotDeterminedConns := netset.NoTransports() - for _, dfwCategory := range d.categoriesSpecs { - if dfwCategory.category == ethernetCategory { + for _, dfwCategory := range d.CategoriesSpecs { + if dfwCategory.Category == ethernetCategory { continue // cuurently skip L2 rules } // get analyzed conns from this category @@ -75,7 +75,7 @@ func (d *DFW) OriginalRulesStrFormatted() string { writer := tabwriter.NewWriter(&builder, 1, 1, 1, ' ', tabwriter.Debug) fmt.Fprintln(writer, "original rules:") fmt.Fprintln(writer, getRulesFormattedHeaderLine()) - for _, c := range d.categoriesSpecs { + for _, c := range d.CategoriesSpecs { for _, ruleStr := range c.originalRulesStr() { if ruleStr == "" { continue @@ -89,9 +89,9 @@ func (d *DFW) OriginalRulesStrFormatted() string { // return a string rep that shows the fw-rules in all categories func (d *DFW) String() string { - categoriesStrings := make([]string, len(d.categoriesSpecs)) - for i := range d.categoriesSpecs { - categoriesStrings[i] = d.categoriesSpecs[i].string() + categoriesStrings := make([]string, len(d.CategoriesSpecs)) + for i := range d.CategoriesSpecs { + categoriesStrings[i] = d.CategoriesSpecs[i].string() } return strings.Join(categoriesStrings, lineSeparatorStr) } @@ -99,12 +99,12 @@ func (d *DFW) String() string { func (d *DFW) AllEffectiveRules() string { inboundRes := []string{} outboundRes := []string{} - for i := range d.categoriesSpecs { - if len(d.categoriesSpecs[i].processedRules.inbound) > 0 { - inboundRes = append(inboundRes, d.categoriesSpecs[i].inboundEffectiveRules()) + for i := range d.CategoriesSpecs { + if len(d.CategoriesSpecs[i].ProcessedRules.Inbound) > 0 { + inboundRes = append(inboundRes, 
d.CategoriesSpecs[i].inboundEffectiveRules()) } - if len(d.categoriesSpecs[i].processedRules.outbound) > 0 { - outboundRes = append(outboundRes, d.categoriesSpecs[i].outboundEffectiveRules()) + if len(d.CategoriesSpecs[i].ProcessedRules.Outbound) > 0 { + outboundRes = append(outboundRes, d.CategoriesSpecs[i].outboundEffectiveRules()) } } inbound := fmt.Sprintf("\nInbound effective rules only:%s%s\n", common.ShortSep, strings.Join(inboundRes, lineSeparatorStr)) @@ -117,8 +117,8 @@ func (d *DFW) AllEffectiveRules() string { func (d *DFW) AddRule(src, dst []*endpoints.VM, srcGroups, dstGroups, scopeGroups []*collector.Group, isAllSrcGroups, isAllDstGroups bool, conn *netset.TransportSet, categoryStr, actionStr, direction string, ruleID int, origRule *collector.Rule, scope []*endpoints.VM, secPolicyName string, origDefaultRule *collector.FirewallRule) { - for _, fwCategory := range d.categoriesSpecs { - if fwCategory.category.string() == categoryStr { + for _, fwCategory := range d.CategoriesSpecs { + if fwCategory.Category.string() == categoryStr { fwCategory.addRule(src, dst, srcGroups, dstGroups, scopeGroups, isAllSrcGroups, isAllDstGroups, conn, actionStr, direction, ruleID, origRule, scope, secPolicyName, origDefaultRule) } @@ -127,23 +127,23 @@ func (d *DFW) AddRule(src, dst []*endpoints.VM, srcGroups, dstGroups, scopeGroup /*func (d *DFW) AddRule(src, dst []*endpoints.VM, conn *netset.TransportSet, categoryStr string, actionStr string) { var categoryObj *categorySpec - for _, c := range d.categoriesSpecs { - if c.category.string() == categoryStr { + for _, c := range d.CategoriesSpecs { + if c.Category.string() == categoryStr { categoryObj = c } } - if categoryObj == nil { // create new category if missing + if categoryObj == nil { // create new Category if missing categoryObj = &categorySpec{ - category: dfwCategoryFromString(categoryStr), + Category: dfwCategoryFromString(categoryStr), } - d.categoriesSpecs = append(d.categoriesSpecs, categoryObj) + d.CategoriesSpecs = append(d.CategoriesSpecs, categoryObj) } newRule := &FwRule{ srcVMs: src, dstVMs: dst, conn: netset.All(), // todo: change - action: actionFromString(actionStr), + Action: actionFromString(actionStr), } categoryObj.rules = append(categoryObj.rules, newRule) }*/ @@ -157,7 +157,7 @@ func NewEmptyDFW(globalDefaultAllow bool) *DFW { res.defaultAction = actionAllow } for _, c := range categoriesList { - res.categoriesSpecs = append(res.categoriesSpecs, newEmptyCategory(c, res)) + res.CategoriesSpecs = append(res.CategoriesSpecs, newEmptyCategory(c, res)) } return res } diff --git a/pkg/model/dfw/rule.go b/pkg/model/dfw/rule.go index 180302d..4c95b6d 100644 --- a/pkg/model/dfw/rule.go +++ b/pkg/model/dfw/rule.go @@ -13,7 +13,7 @@ import ( nsx "github.com/np-guard/vmware-analyzer/pkg/model/generated" ) -type ruleAction string +type RuleAction string const ( listSeparatorStr = "," @@ -24,13 +24,13 @@ const ( var ingressDirections = []string{"IN", "IN_OUT"}*/ const ( - actionAllow ruleAction = "allow" - actionDeny ruleAction = "deny" // currently not differentiating between "reject" and "drop" - actionJumpToApp ruleAction = "jump_to_application" - actionNone ruleAction = "none" // to mark that a default rule is not configured + actionAllow RuleAction = "allow" + actionDeny RuleAction = "deny" // currently not differentiating between "reject" and "drop" + actionJumpToApp RuleAction = "jump_to_application" + actionNone RuleAction = "none" // to mark that a default rule is not configured ) -/*func actionFromString(input string) 
ruleAction { +/*func actionFromString(input string) RuleAction { switch input { case string(actionAllow): return actionAllow @@ -42,7 +42,7 @@ const ( return actionDeny }*/ -func actionFromString(s string) ruleAction { +func actionFromString(s string) RuleAction { switch strings.ToLower(s) { case string(actionAllow): return actionAllow @@ -68,7 +68,7 @@ type FwRule struct { // Scope implies additional condition on any Src and any Dst; will be added in one of the last stages ScopeGroups []*collector.Group conn *netset.TransportSet - action ruleAction + Action RuleAction direction string // "IN","OUT", "IN_OUT" origRuleObj *collector.Rule origDefaultRuleObj *collector.FirewallRule @@ -123,7 +123,7 @@ func (f *FwRule) getInboundRule() *FwRule { IsAllDstGroups: f.IsAllDstGroups, ScopeGroups: f.ScopeGroups, conn: f.conn, - action: f.action, + Action: f.Action, direction: string(nsx.RuleDirectionIN), origRuleObj: f.origRuleObj, ruleID: f.ruleID, @@ -162,7 +162,7 @@ func (f *FwRule) getOutboundRule() *FwRule { IsAllDstGroups: f.IsAllDstGroups, ScopeGroups: f.ScopeGroups, conn: f.conn, - action: f.action, + Action: f.Action, direction: string(nsx.RuleDirectionOUT), origRuleObj: f.origRuleObj, ruleID: f.ruleID, @@ -200,12 +200,12 @@ func vmsString(vms []*endpoints.VM) string { // groups are interpreted to VM members in this representation func (f *FwRule) string() string { return fmt.Sprintf("ruleID: %d, src: %s, dst: %s, conn: %s, action: %s, direction: %s, scope: %s, sec-policy: %s", - f.ruleID, vmsString(f.srcVMs), vmsString(f.dstVMs), f.conn.String(), string(f.action), f.direction, vmsString(f.scope), f.secPolicyName) + f.ruleID, vmsString(f.srcVMs), vmsString(f.dstVMs), f.conn.String(), string(f.Action), f.direction, vmsString(f.scope), f.secPolicyName) } func (f *FwRule) effectiveRuleStr() string { return fmt.Sprintf("ruleID: %d, src: %s, dst: %s, conn: %s, action: %s, direction: %s, sec-policy: %s", - f.ruleID, vmsString(f.srcVMs), vmsString(f.dstVMs), f.conn.String(), string(f.action), f.direction, f.secPolicyName) + f.ruleID, vmsString(f.srcVMs), vmsString(f.dstVMs), f.conn.String(), string(f.Action), f.direction, f.secPolicyName) } func getDefaultRuleScope(r *collector.FirewallRule) string { @@ -253,11 +253,11 @@ func getRulesFormattedHeaderLine() string { "src", "dst", "conn", - "action", + "Action", "direction", "scope", "sec-policy", - "category", + "Category", } return fmt.Sprintf("%s%s%s", common.Red, @@ -309,7 +309,7 @@ func (f *FwRule) originalRuleStr() string { f.getShortPathsString(f.origRuleObj.DestinationGroups), // todo: origRuleObj.Services is not always the services, can also be service_entries f.getShortPathsString(f.origRuleObj.Services), - string(f.action), f.direction, + string(f.Action), f.direction, strings.Join(f.origRuleObj.Scope, listSeparatorStr), f.secPolicyName, f.secPolicyCategory, diff --git a/pkg/model/parser.go b/pkg/model/parser.go index a2d5636..315ff8a 100644 --- a/pkg/model/parser.go +++ b/pkg/model/parser.go @@ -44,7 +44,7 @@ type NSXConfigParser struct { configRes *config groups []*collector.Group allGroupsVMs []*endpoints.VM - // store references to groups/services objects from paths used in fw rules + // store references to groups/services objects from paths used in Fw rules groupPathsToObjects map[string]*collector.Group servicePathsToObjects map[string]*collector.Service } @@ -72,7 +72,7 @@ func (p *NSXConfigParser) AddPathsToDisplayNames() { for sPath, sObj := range p.servicePathsToObjects { res[sPath] = *sObj.DisplayName } - 
p.configRes.fw.SetPathsToDisplayNames(res) + p.configRes.Fw.SetPathsToDisplayNames(res) } // getVMs assigns the parsed VM objects from the NSX resources container into the res config object @@ -99,7 +99,7 @@ func (p *NSXConfigParser) getVMs() { } func (p *NSXConfigParser) getDFW() { - p.configRes.fw = dfw.NewEmptyDFW(false) // TODO: what is global default? + p.configRes.Fw = dfw.NewEmptyDFW(false) // TODO: what is global default? for i := range p.rc.DomainList { domainRsc := p.rc.DomainList[i].Resources for j := range domainRsc.SecurityPolicyList { @@ -148,7 +148,7 @@ func (p *NSXConfigParser) getDFW() { } func (p *NSXConfigParser) addFWRule(r *parsedRule, category string, origRule *collector.Rule) { - p.configRes.fw.AddRule(r.srcVMs, r.dstVMs, r.srcGroups, r.dstGroups, r.scopeGroups, r.isAllSrcGroups, r.isAllDstGroups, + p.configRes.Fw.AddRule(r.srcVMs, r.dstVMs, r.srcGroups, r.dstGroups, r.scopeGroups, r.isAllSrcGroups, r.isAllDstGroups, r.conn, category, r.action, r.direction, r.ruleID, origRule, r.scope, r.secPolicyName, r.defaultRuleObj) } diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go index a812a0e..39ec313 100644 --- a/pkg/synthesis/synthesis.go +++ b/pkg/synthesis/synthesis.go @@ -1,8 +1,11 @@ package synthesis import ( + "fmt" "github.com/np-guard/vmware-analyzer/pkg/collector" "github.com/np-guard/vmware-analyzer/pkg/model" + "github.com/np-guard/vmware-analyzer/pkg/model/dfw" + "strings" ) func NSXSynthesis(recourses *collector.ResourcesContainerModel, params model.OutputParameters) (string, error) { @@ -12,8 +15,34 @@ func NSXSynthesis(recourses *collector.ResourcesContainerModel, params model.Out return "", err } config := parser.GetConfig() + for _, category := range config.Fw.CategoriesSpecs { + if len(category.ProcessedRules.Outbound)+len(category.ProcessedRules.Inbound) == 0 { + fmt.Printf("no rules in category %v\n", category.Category) + continue + } + fmt.Printf("\ncategory: %v\n===============\n", category.Category) + fmt.Println("Outbound rules:") + printRules(category.ProcessedRules.Outbound) + fmt.Println("Inbound rules:") + printRules(category.ProcessedRules.Inbound) + } + return "", nil +} - _ = config +func printRules(rules []*dfw.FwRule) { + for _, rule := range rules { + fmt.Printf("\taction %v SourceGroups: %v DestinationGroups: %v\n", rule.Action, + getGroupsStr(rule.SrcGroups, rule.IsAllSrcGroups), getGroupsStr(rule.DstGroups, rule.IsAllDstGroups)) + } +} - return "", nil +func getGroupsStr(groups []*collector.Group, isAll bool) string { + if isAll { + return "Any" + } + groupsStr := make([]string, len(groups)) + for i, group := range groups { + groupsStr[i] = *group.DisplayName + } + return strings.Join(groupsStr, ", ") } From e17dfd667f69426cf1093edae6152f1dff0058da Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 18 Dec 2024 13:20:41 +0200 Subject: [PATCH 15/76] update model --- pkg/symbolicexpr/model.go | 1 + pkg/synthesis/model.go | 13 +++++++------ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/pkg/symbolicexpr/model.go b/pkg/symbolicexpr/model.go index a87d842..6adbe35 100644 --- a/pkg/symbolicexpr/model.go +++ b/pkg/symbolicexpr/model.go @@ -39,6 +39,7 @@ type Conjunction []atomic type SymbolicPath struct { Src Conjunction Dst Conjunction + // ToDo: add Conn } type SymbolicPaths []*SymbolicPath diff --git a/pkg/synthesis/model.go b/pkg/synthesis/model.go index 2e6f106..333ddae 100644 --- a/pkg/synthesis/model.go +++ b/pkg/synthesis/model.go @@ -25,18 +25,19 @@ type Tags map[string]*collector.Tag // //nolint:all // 
todo: tmp for defs without implementation type RuleForSynthesis struct { // original rule - origRule dfw.FwRule // original rule - // category; needed for interpreting path + origRule *dfw.FwRule // original rule + // category; for reference, e.g. in the labels or documentation of the synthesized objects // a pass rule is interpreted as deny for the current category - category dfw.DfwCategory + origRuleCategory dfw.DfwCategory // The following refers to conversion of original allow rule to symbolic paths, as follows: - // // Assuming there are only allow (non-prioritized, of course) rules. // This is relevant only for allow rules (nil otherwise) - allowOnlyRulePaths symbolicexpr.SymbolicPaths + allowOnlyRulePaths symbolicexpr.SymbolicPaths + allowOnlyEffectingRules []*dfw.FwRule // rules effecting allowOnlyRulePaths (potentially higher priority pass and deny) // Assuming there are prioritized allow and deny rules (but no categories and pass) // This is relevant for allow and deny rules (pass nil), priorities are the same as of the original rules - allowAndDenyRulesPaths symbolicexpr.SymbolicPaths + allowAndDenyRulesPaths symbolicexpr.SymbolicPaths + allowAndDenyEffectingRules []*dfw.FwRule // rules effecting allowAndDenyRulesPaths (potentially higher priority pass) } //nolint:all // todo: tmp for defs without implementation From a583ac298abc47644dea122bdd6dd5ade1e07414 Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 18 Dec 2024 13:56:46 +0200 Subject: [PATCH 16/76] code for convertFWRuleToSymbolicPaths --- pkg/symbolicexpr/model.go | 1 + pkg/symbolicexpr/symbolicPath.go | 38 ++++++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+) diff --git a/pkg/symbolicexpr/model.go b/pkg/symbolicexpr/model.go index 6adbe35..9e1767d 100644 --- a/pkg/symbolicexpr/model.go +++ b/pkg/symbolicexpr/model.go @@ -45,4 +45,5 @@ type SymbolicPath struct { type SymbolicPaths []*SymbolicPath // Atomics map from Atomics string to *atomicTerm +// todo: to use for cashing type Atomics map[string]atomic diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index bdff43b..e702911 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -3,6 +3,9 @@ package symbolicexpr import ( "fmt" "strings" + + "github.com/np-guard/vmware-analyzer/pkg/collector" + "github.com/np-guard/vmware-analyzer/pkg/model/dfw" ) func (path *SymbolicPath) string() string { @@ -93,3 +96,38 @@ func computeAllowGivenAllowHigherDeny(allowPath, denyPath SymbolicPath) *Symboli } return &resAllowPaths } + +func convertFWRuleToSymbolicPaths(rule *dfw.FwRule) SymbolicPaths { + resSymbolicPaths := SymbolicPaths{} + any := Conjunction{tautology{}} + srcTerms := getAtomicTermsForGroups(rule.SrcGroups) + dstTerms := getAtomicTermsForGroups(rule.DstGroups) + switch { + case rule.IsAllSrcGroups && rule.IsAllDstGroups: + resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{any, any}) + case rule.IsAllSrcGroups: + for _, dstTerm := range dstTerms { + resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{any, Conjunction{dstTerm}}) + } + case rule.IsAllDstGroups: + for _, srcTerm := range srcTerms { + resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Conjunction{srcTerm}, any}) + } + default: + for _, srcTerm := range srcTerms { + for _, dstTerm := range dstTerms { + resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Conjunction{srcTerm}, Conjunction{dstTerm}}) + } + } + } + return resSymbolicPaths +} + +// todo: handling only "in group" in this stage +func 
getAtomicTermsForGroups(groups []*collector.Group) []*atomicTerm { + res := make([]*atomicTerm, len(groups)) + for i, group := range groups { + res[i] = &atomicTerm{property: group, toVal: *group.DisplayName, neg: false} + } + return res +} From 7903b88c2feb20f2f5d1d1f9593a30f8a5f153ea Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 18 Dec 2024 16:20:34 +0200 Subject: [PATCH 17/76] print path conversion result --- pkg/model/dfw/category.go | 18 +++++++++--------- pkg/model/dfw/dfw.go | 6 +++--- pkg/model/dfw/rule.go | 2 +- pkg/symbolicexpr/atomic.go | 4 ++-- pkg/symbolicexpr/symbolicPath.go | 7 ++++--- pkg/symbolicexpr/symbolicexpr_test.go | 26 +++++++++++++------------- pkg/synthesis/synthesis.go | 19 ++++++++++++++----- 7 files changed, 46 insertions(+), 36 deletions(-) diff --git a/pkg/model/dfw/category.go b/pkg/model/dfw/category.go index b538a81..3616670 100644 --- a/pkg/model/dfw/category.go +++ b/pkg/model/dfw/category.go @@ -93,7 +93,7 @@ func (e *EffectiveRules) addOutboundRule(r *FwRule) { } } -type categorySpec struct { +type CategorySpec struct { Category DfwCategory rules []*FwRule // ordered list of rules defaultAction RuleAction @@ -109,7 +109,7 @@ type categorySpec struct { // todo: may possibly eliminate jumpToAppConns and unify them with notDeterminedConns // //nolint:gocritic // for now keep commentedOutCode -func (c *categorySpec) analyzeCategory(src, dst *endpoints.VM, isIngress bool, +func (c *CategorySpec) analyzeCategory(src, dst *endpoints.VM, isIngress bool, ) (allowedConns, jumpToAppConns, deniedConns, nonDet *netset.TransportSet) { allowedConns, jumpToAppConns, deniedConns = netset.NoTransports(), netset.NoTransports(), netset.NoTransports() rules := c.ProcessedRules.Inbound // inbound effective rules @@ -146,7 +146,7 @@ func (c *categorySpec) analyzeCategory(src, dst *endpoints.VM, isIngress bool, return allowedConns, jumpToAppConns, deniedConns, nonDet } -func (c *categorySpec) originalRulesStr() []string { +func (c *CategorySpec) originalRulesStr() []string { rulesStr := make([]string, len(c.rules)) for i := range c.rules { rulesStr[i] = c.rules[i].originalRuleStr() @@ -154,7 +154,7 @@ func (c *categorySpec) originalRulesStr() []string { return rulesStr } -func (c *categorySpec) string() string { +func (c *CategorySpec) string() string { rulesStr := make([]string, len(c.rules)+1) rulesStr[0] = "rules:" for i := range c.rules { @@ -164,7 +164,7 @@ func (c *categorySpec) string() string { strings.Join(rulesStr, lineSeparatorStr), string(c.defaultAction)) } -func (c *categorySpec) inboundEffectiveRules() string { +func (c *CategorySpec) inboundEffectiveRules() string { rulesStr := make([]string, len(c.ProcessedRules.Inbound)) for i := range c.ProcessedRules.Inbound { rulesStr[i] = c.ProcessedRules.Inbound[i].effectiveRuleStr() @@ -172,7 +172,7 @@ func (c *categorySpec) inboundEffectiveRules() string { return strings.Join(rulesStr, lineSeparatorStr) } -func (c *categorySpec) outboundEffectiveRules() string { +func (c *CategorySpec) outboundEffectiveRules() string { rulesStr := make([]string, len(c.ProcessedRules.Outbound)) for i := range c.ProcessedRules.Outbound { rulesStr[i] = c.ProcessedRules.Outbound[i].effectiveRuleStr() @@ -180,7 +180,7 @@ func (c *categorySpec) outboundEffectiveRules() string { return strings.Join(rulesStr, lineSeparatorStr) } -func (c *categorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups, scopeGroups []*collector.Group, +func (c *CategorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups, scopeGroups 
[]*collector.Group, isAllSrcGroup, isAllDstGroup bool, conn *netset.TransportSet, action, direction string, ruleID int, origRule *collector.Rule, scope []*endpoints.VM, secPolicyName string, origDefaultRule *collector.FirewallRule) { newRule := &FwRule{ @@ -214,8 +214,8 @@ func (c *categorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups, s } } -func newEmptyCategory(c DfwCategory, d *DFW) *categorySpec { - return &categorySpec{ +func newEmptyCategory(c DfwCategory, d *DFW) *CategorySpec { + return &CategorySpec{ Category: c, dfwRef: d, defaultAction: actionNone, diff --git a/pkg/model/dfw/dfw.go b/pkg/model/dfw/dfw.go index 9180b9e..a076259 100644 --- a/pkg/model/dfw/dfw.go +++ b/pkg/model/dfw/dfw.go @@ -13,7 +13,7 @@ import ( ) type DFW struct { - CategoriesSpecs []*categorySpec // ordered list of categories + CategoriesSpecs []*CategorySpec // ordered list of categories defaultAction RuleAction // global default (?) pathsToDisplayNames map[string]string // map from printing paths references as display names instead @@ -126,14 +126,14 @@ func (d *DFW) AddRule(src, dst []*endpoints.VM, srcGroups, dstGroups, scopeGroup } /*func (d *DFW) AddRule(src, dst []*endpoints.VM, conn *netset.TransportSet, categoryStr string, actionStr string) { - var categoryObj *categorySpec + var categoryObj *CategorySpec for _, c := range d.CategoriesSpecs { if c.Category.string() == categoryStr { categoryObj = c } } if categoryObj == nil { // create new Category if missing - categoryObj = &categorySpec{ + categoryObj = &CategorySpec{ Category: dfwCategoryFromString(categoryStr), } d.CategoriesSpecs = append(d.CategoriesSpecs, categoryObj) diff --git a/pkg/model/dfw/rule.go b/pkg/model/dfw/rule.go index 4c95b6d..832389c 100644 --- a/pkg/model/dfw/rule.go +++ b/pkg/model/dfw/rule.go @@ -75,7 +75,7 @@ type FwRule struct { ruleID int secPolicyName string secPolicyCategory string - categoryRef *categorySpec + categoryRef *CategorySpec dfwRef *DFW // srcRuleObj ... 
todo: add a reference to the original rule retrieved from api diff --git a/pkg/symbolicexpr/atomic.go b/pkg/symbolicexpr/atomic.go index da3b37a..3ca94d0 100644 --- a/pkg/symbolicexpr/atomic.go +++ b/pkg/symbolicexpr/atomic.go @@ -17,11 +17,11 @@ func (term atomicTerm) string() string { case *endpoints.VM: labelType = "virtual machine " case *collector.Tag: - labelType = "tag " + labelType = "tag " + term.property.Name() case *collector.Group: labelType = "group " } - return labelType + term.property.Name() + equalSign + term.toVal + return labelType + equalSign + term.toVal } func NewAtomicTerm(label vmProperty, toVal string, neg bool) *atomicTerm { diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index e702911..117ca82 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -17,7 +17,7 @@ func (path *SymbolicPath) isEmpty() bool { return path.Src.isEmptySet() || path.Dst.isEmptySet() } -func (paths *SymbolicPaths) string() string { +func (paths *SymbolicPaths) String() string { if len(*paths) == 0 { return emptySet } @@ -97,7 +97,8 @@ func computeAllowGivenAllowHigherDeny(allowPath, denyPath SymbolicPath) *Symboli return &resAllowPaths } -func convertFWRuleToSymbolicPaths(rule *dfw.FwRule) SymbolicPaths { +// ConvertFWRuleToSymbolicPaths given a rule, converts its src, dst and conn to SymbolicPaths +func ConvertFWRuleToSymbolicPaths(rule *dfw.FwRule) *SymbolicPaths { resSymbolicPaths := SymbolicPaths{} any := Conjunction{tautology{}} srcTerms := getAtomicTermsForGroups(rule.SrcGroups) @@ -120,7 +121,7 @@ func convertFWRuleToSymbolicPaths(rule *dfw.FwRule) SymbolicPaths { } } } - return resSymbolicPaths + return &resSymbolicPaths } // todo: handling only "in group" in this stage diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index 08c64e0..ac1537a 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -63,9 +63,9 @@ func TestComputeAllowGivenDenySingleTermEach(t *testing.T) { denyPath := SymbolicPath{conjSrc2, conjDst2} fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) - fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.string()) + fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) require.Equal(t, "(s1 = str1 and s2 != str2) to (d1 = str1)\n(s1 = str1) to (d1 = str1 and d2 != str2)", - allowGivenDeny.string(), "allowGivenDeny single term computation not as expected") + allowGivenDeny.String(), "allowGivenDeny single term computation not as expected") } // Input: @@ -94,7 +94,7 @@ func TestComputeAllowGivenDenyThreeTermsEach(t *testing.T) { denyPath := SymbolicPath{conjDeny, conjDeny} fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) - fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.string()) + fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) require.Equal(t, "(s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`) to (s1 = str1 and s2 = str2 and s3 = str3)\n"+ "(s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`) to (s1 = str1 and s2 = str2 and s3 = str3)\n"+ @@ -102,7 +102,7 @@ func TestComputeAllowGivenDenyThreeTermsEach(t 
*testing.T) { "(s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`)\n"+ "(s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`)\n"+ "(s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s3` != str3`)", - allowGivenDeny.string(), "allowGivenDeny three terms computation not as expected") + allowGivenDeny.String(), "allowGivenDeny three terms computation not as expected") } // Input: @@ -129,10 +129,10 @@ func TestComputeAllowGivenDenyAllowTautology(t *testing.T) { denyPath := SymbolicPath{conjDeny, conjDeny} fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) - fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.string()) + fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) require.Equal(t, "(s1` != str1`) to (*)\n(s2` != str2`) to (*)\n(s3` != str3`) to (*)\n(*) to (s1` != str1`)\n"+ - "(*) to (s2` != str2`)\n(*) to (s3` != str3`)", allowGivenDeny.string(), + "(*) to (s2` != str2`)\n(*) to (s3` != str3`)", allowGivenDeny.String(), "allowGivenDeny allow tautology computation not as expected") } @@ -155,8 +155,8 @@ func TestComputeAllowGivenDenyDenyTautology(t *testing.T) { denyPath := SymbolicPath{tautologyConj, tautologyConj} fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) - fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.string()) - require.Equal(t, emptySet, allowGivenDeny.string(), + fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) + require.Equal(t, emptySet, allowGivenDeny.String(), "allowGivenDeny deny tautology computation not as expected") } @@ -203,9 +203,9 @@ func TestComputeAllowGivenDenies(t *testing.T) { conjDenyDst := Conjunction{atomicDenyDst} denyPaths = append(denyPaths, &SymbolicPath{conjDenySrc, conjDenyDst}) } - fmt.Printf("allowPaths:\n%v\ndenyPaths:\n%v\n", allowPaths.string(), denyPaths.string()) + fmt.Printf("allowPaths:\n%v\ndenyPaths:\n%v\n", allowPaths.String(), denyPaths.String()) res := ComputeAllowGivenDenies(&allowPaths, &denyPaths) - fmt.Printf("ComputeAllowGivenDenies:\n%v\n", res.string()) + fmt.Printf("ComputeAllowGivenDenies:\n%v\n", res.String()) require.Equal(t, "(tag = t0 and segment != s0 and segment != s2 and segment != s4) to (tag = t1)\n"+ "(tag = t0 and segment != s0 and segment != s2) to (tag = t1 and segment != s5)\n"+ "(tag = t0 and segment != s0 and segment != s4) to (tag = t1 and segment != s3)\n"+ @@ -222,7 +222,7 @@ func TestComputeAllowGivenDenies(t *testing.T) { "(tag = t2 and segment != s2) to (tag = t3 and segment != s1 and segment != s5)\n"+ "(tag = t2 and segment != s4) to (tag = t3 and segment != s1 and segment != s3)\n"+ "(tag = t2) to (tag = t3 and segment != s1 and segment != s3 and segment != s5)", - ComputeAllowGivenDenies(&allowPaths, &denyPaths).string(), + ComputeAllowGivenDenies(&allowPaths, &denyPaths).String(), "ComputeAllowGivenDenies computation not as expected") } @@ -244,7 +244,7 @@ func TestAllowDenyOptimizeEmptyPath(t *testing.T) { denyPath := SymbolicPath{conjSrc1, conjDst1} allowWithDeny := ComputeAllowGivenDenies(&SymbolicPaths{&allowPath}, &SymbolicPaths{&denyPath}) 
fmt.Printf("allow path: %v with higher priority deny path:%v is:\n%v\n\n", - allowPath.string(), denyPath.string(), allowWithDeny.string()) + allowPath.string(), denyPath.string(), allowWithDeny.String()) negateAtomic1 := atomic1.negate().(atomicTerm) require.Equal(t, true, atomic1.isNegateOf(negateAtomic1), "isNegateOf does not work") for _, thisPath := range *allowWithDeny { @@ -254,5 +254,5 @@ func TestAllowDenyOptimizeEmptyPath(t *testing.T) { require.Equal(t, true, (*allowWithDeny)[0].Src.isEmptySet(), "isEmptySet() does not work properly") require.Equal(t, false, (*allowWithDeny)[1].Src.isEmptySet(), "isEmptySet() does not work properly") newPath := allowWithDeny.removeEmpty() - fmt.Printf("newPath %v\n", newPath.string()) + fmt.Printf("newPath %v\n", newPath.String()) } diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go index 39ec313..ab62ff3 100644 --- a/pkg/synthesis/synthesis.go +++ b/pkg/synthesis/synthesis.go @@ -5,6 +5,7 @@ import ( "github.com/np-guard/vmware-analyzer/pkg/collector" "github.com/np-guard/vmware-analyzer/pkg/model" "github.com/np-guard/vmware-analyzer/pkg/model/dfw" + "github.com/np-guard/vmware-analyzer/pkg/symbolicexpr" "strings" ) @@ -15,24 +16,32 @@ func NSXSynthesis(recourses *collector.ResourcesContainerModel, params model.Out return "", err } config := parser.GetConfig() - for _, category := range config.Fw.CategoriesSpecs { + preProcessing(config.Fw.CategoriesSpecs) + return "", nil +} + +// preProcessing: convert rules from spec to symbolicRules struct +func preProcessing(categoriesSpecs []*dfw.CategorySpec) []*symbolicRules { + for _, category := range categoriesSpecs { if len(category.ProcessedRules.Outbound)+len(category.ProcessedRules.Inbound) == 0 { fmt.Printf("no rules in category %v\n", category.Category) continue } fmt.Printf("\ncategory: %v\n===============\n", category.Category) fmt.Println("Outbound rules:") - printRules(category.ProcessedRules.Outbound) + convertRulesToSymbolicPaths(category.ProcessedRules.Outbound) fmt.Println("Inbound rules:") - printRules(category.ProcessedRules.Inbound) + convertRulesToSymbolicPaths(category.ProcessedRules.Inbound) } - return "", nil + return nil } -func printRules(rules []*dfw.FwRule) { +func convertRulesToSymbolicPaths(rules []*dfw.FwRule) { for _, rule := range rules { fmt.Printf("\taction %v SourceGroups: %v DestinationGroups: %v\n", rule.Action, getGroupsStr(rule.SrcGroups, rule.IsAllSrcGroups), getGroupsStr(rule.DstGroups, rule.IsAllDstGroups)) + ruleSymbolicPaths := symbolicexpr.ConvertFWRuleToSymbolicPaths(rule) + fmt.Printf("converted path: %v\n", ruleSymbolicPaths.String()) } } From b58a20de59bb7d502b0ba8d1837de6db35d03b48 Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 18 Dec 2024 17:12:22 +0200 Subject: [PATCH 18/76] gathered the data into the relevant data structure and moved printing to the higher level --- pkg/synthesis/model.go | 36 ++++++++++++++++++++++-------------- pkg/synthesis/synthesis.go | 38 ++++++++++++-------------------------- 2 files changed, 34 insertions(+), 40 deletions(-) diff --git a/pkg/synthesis/model.go b/pkg/synthesis/model.go index 333ddae..600404f 100644 --- a/pkg/synthesis/model.go +++ b/pkg/synthesis/model.go @@ -1,6 +1,9 @@ package synthesis import ( + "fmt" + "strings" + "github.com/np-guard/vmware-analyzer/pkg/collector" "github.com/np-guard/vmware-analyzer/pkg/model/dfw" "github.com/np-guard/vmware-analyzer/pkg/model/endpoints" @@ -21,14 +24,15 @@ type AbstractModelSyn struct { // Tags map from tag's name to the tag type Tags 
map[string]*collector.Tag -// RuleForSynthesis input to synthesis. Synthesis very likely to non-prioritized only allow rules +// SymbolicRule input to synthesis. Synthesis very likely to non-prioritized only allow rules // -//nolint:all // todo: tmp for defs without implementation -type RuleForSynthesis struct { // original rule +//nolint:all +type SymbolicRule struct { // original rule origRule *dfw.FwRule // original rule // category; for reference, e.g. in the labels or documentation of the synthesized objects // a pass rule is interpreted as deny for the current category - origRuleCategory dfw.DfwCategory + origRuleCategory dfw.DfwCategory + origSymbolicPaths *symbolicexpr.SymbolicPaths // symbolic presentation paths defined by the original rule // The following refers to conversion of original allow rule to symbolic paths, as follows: // Assuming there are only allow (non-prioritized, of course) rules. // This is relevant only for allow rules (nil otherwise) @@ -42,8 +46,8 @@ type RuleForSynthesis struct { // original rule //nolint:all // todo: tmp for defs without implementation type symbolicRules struct { - inbound []*RuleForSynthesis // ordered list inbound RuleForSynthesis - outbound []*RuleForSynthesis // ordered list outbound RuleForSynthesis + inbound []*SymbolicRule // ordered list inbound SymbolicRule + outbound []*SymbolicRule // ordered list outbound SymbolicRule } // maps used by AbstractModelSyn @@ -54,12 +58,16 @@ type Segments map[string]*collector.Segment // VMs map from VM name to the VM type VMs map[string]*endpoints.VM -// ComputeSymbolicRules computes abstract rules in model for synthesis -// todo: will have to combine different categories into a single list of inbound, outbound -// -//nolint:all // todo: tmp for defs without implementation -func computeSymbolicRules(fireWall dfw.DFW) symbolicRules { - _ = fireWall - symbolicexpr.ComputeAllowGivenDenies(&symbolicexpr.SymbolicPaths{}, &symbolicexpr.SymbolicPaths{}) - return symbolicRules{nil, nil} +func (allRules symbolicRules) string() string { + return "\nsymbolicInbound Rules:\n~~~~~~~~~~~~~~~~~~~~~~~\n" + strSymbolicRules(allRules.inbound) + + "\nsymbolicOutbound Rules:\n~~~~~~~~~~~~~~~~~~~~~~~~~\n" + strSymbolicRules(allRules.outbound) +} + +func strSymbolicRules(rules []*SymbolicRule) string { + resStr := make([]string, len(rules)) + for i, rule := range rules { + resStr[i] = fmt.Sprintf("\tcategory: %v action: %v paths: %v", rule.origRuleCategory, rule.origRule.Action, + rule.origSymbolicPaths) + } + return strings.Join(resStr, "\n") } diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go index ab62ff3..02a49d5 100644 --- a/pkg/synthesis/synthesis.go +++ b/pkg/synthesis/synthesis.go @@ -6,7 +6,6 @@ import ( "github.com/np-guard/vmware-analyzer/pkg/model" "github.com/np-guard/vmware-analyzer/pkg/model/dfw" "github.com/np-guard/vmware-analyzer/pkg/symbolicexpr" - "strings" ) func NSXSynthesis(recourses *collector.ResourcesContainerModel, params model.OutputParameters) (string, error) { @@ -16,42 +15,29 @@ func NSXSynthesis(recourses *collector.ResourcesContainerModel, params model.Out return "", err } config := parser.GetConfig() - preProcessing(config.Fw.CategoriesSpecs) + symbolicRules := symbolicRules{} + symbolicRules.inbound, symbolicRules.outbound = preProcessing(config.Fw.CategoriesSpecs) + fmt.Println(symbolicRules.string()) return "", nil } // preProcessing: convert rules from spec to symbolicRules struct -func preProcessing(categoriesSpecs []*dfw.CategorySpec) []*symbolicRules { +func 
preProcessing(categoriesSpecs []*dfw.CategorySpec) (inbound, outbound []*SymbolicRule) { for _, category := range categoriesSpecs { if len(category.ProcessedRules.Outbound)+len(category.ProcessedRules.Inbound) == 0 { - fmt.Printf("no rules in category %v\n", category.Category) continue } - fmt.Printf("\ncategory: %v\n===============\n", category.Category) - fmt.Println("Outbound rules:") - convertRulesToSymbolicPaths(category.ProcessedRules.Outbound) - fmt.Println("Inbound rules:") - convertRulesToSymbolicPaths(category.ProcessedRules.Inbound) + inbound = append(inbound, convertRulesToSymbolicPaths(category.ProcessedRules.Inbound, category.Category)...) + outbound = append(outbound, convertRulesToSymbolicPaths(category.ProcessedRules.Outbound, category.Category)...) } - return nil + return inbound, outbound } -func convertRulesToSymbolicPaths(rules []*dfw.FwRule) { - for _, rule := range rules { - fmt.Printf("\taction %v SourceGroups: %v DestinationGroups: %v\n", rule.Action, - getGroupsStr(rule.SrcGroups, rule.IsAllSrcGroups), getGroupsStr(rule.DstGroups, rule.IsAllDstGroups)) +func convertRulesToSymbolicPaths(rules []*dfw.FwRule, category dfw.DfwCategory) []*SymbolicRule { + res := make([]*SymbolicRule, len(rules)) + for i, rule := range rules { ruleSymbolicPaths := symbolicexpr.ConvertFWRuleToSymbolicPaths(rule) - fmt.Printf("converted path: %v\n", ruleSymbolicPaths.String()) + res[i] = &SymbolicRule{origRule: rule, origRuleCategory: category, origSymbolicPaths: ruleSymbolicPaths} } -} - -func getGroupsStr(groups []*collector.Group, isAll bool) string { - if isAll { - return "Any" - } - groupsStr := make([]string, len(groups)) - for i, group := range groups { - groupsStr[i] = *group.DisplayName - } - return strings.Join(groupsStr, ", ") + return res } From f44b33912c4844ae969ff6b40e5de5ad6a48d099 Mon Sep 17 00:00:00 2001 From: shirim Date: Thu, 19 Dec 2024 08:27:38 +0200 Subject: [PATCH 19/76] renaming --- pkg/synthesis/model.go | 33 ++++++++++++++++----------------- pkg/synthesis/synthesis.go | 16 ++++++++-------- 2 files changed, 24 insertions(+), 25 deletions(-) diff --git a/pkg/synthesis/model.go b/pkg/synthesis/model.go index 600404f..8468b14 100644 --- a/pkg/synthesis/model.go +++ b/pkg/synthesis/model.go @@ -18,36 +18,35 @@ type AbstractModelSyn struct { tags Tags // todo: should be computed by the collector or here? vms VMs atomics symbolicexpr.Atomics // todo: should be used and maintained by FwRule - rules []*symbolicRules // with default deny + policy []*symbolicPolicy // with default deny } // Tags map from tag's name to the tag type Tags map[string]*collector.Tag -// SymbolicRule input to synthesis. Synthesis very likely to non-prioritized only allow rules +// symbolicRule input to synthesis. Synthesis very likely to non-prioritized only allow policy // //nolint:all -type SymbolicRule struct { // original rule +type symbolicRule struct { // original rule origRule *dfw.FwRule // original rule // category; for reference, e.g. in the labels or documentation of the synthesized objects // a pass rule is interpreted as deny for the current category origRuleCategory dfw.DfwCategory origSymbolicPaths *symbolicexpr.SymbolicPaths // symbolic presentation paths defined by the original rule // The following refers to conversion of original allow rule to symbolic paths, as follows: - // Assuming there are only allow (non-prioritized, of course) rules. - // This is relevant only for allow rules (nil otherwise) + // Assuming there are only allow (non-prioritized, of course) policy. 
+ // This is relevant only for allow policy (nil otherwise) allowOnlyRulePaths symbolicexpr.SymbolicPaths - allowOnlyEffectingRules []*dfw.FwRule // rules effecting allowOnlyRulePaths (potentially higher priority pass and deny) - // Assuming there are prioritized allow and deny rules (but no categories and pass) - // This is relevant for allow and deny rules (pass nil), priorities are the same as of the original rules + allowOnlyEffectingRules []*dfw.FwRule // policy effecting allowOnlyRulePaths (potentially higher priority pass and deny) + // Assuming there are prioritized allow and deny policy (but no categories and pass) + // This is relevant for allow and deny policy (pass nil), priorities are the same as of the original policy allowAndDenyRulesPaths symbolicexpr.SymbolicPaths - allowAndDenyEffectingRules []*dfw.FwRule // rules effecting allowAndDenyRulesPaths (potentially higher priority pass) + allowAndDenyEffectingRules []*dfw.FwRule // policy effecting allowAndDenyRulesPaths (potentially higher priority pass) } -//nolint:all // todo: tmp for defs without implementation -type symbolicRules struct { - inbound []*SymbolicRule // ordered list inbound SymbolicRule - outbound []*SymbolicRule // ordered list outbound SymbolicRule +type symbolicPolicy struct { + inbound []*symbolicRule // ordered list inbound symbolicRule + outbound []*symbolicRule // ordered list outbound symbolicRule } // maps used by AbstractModelSyn @@ -58,12 +57,12 @@ type Segments map[string]*collector.Segment // VMs map from VM name to the VM type VMs map[string]*endpoints.VM -func (allRules symbolicRules) string() string { - return "\nsymbolicInbound Rules:\n~~~~~~~~~~~~~~~~~~~~~~~\n" + strSymbolicRules(allRules.inbound) + - "\nsymbolicOutbound Rules:\n~~~~~~~~~~~~~~~~~~~~~~~~~\n" + strSymbolicRules(allRules.outbound) +func (policy symbolicPolicy) string() string { + return "\nsymbolicInbound Rules:\n~~~~~~~~~~~~~~~~~~~~~~~\n" + strSymbolicRules(policy.inbound) + + "\nsymbolicOutbound Rules:\n~~~~~~~~~~~~~~~~~~~~~~~~~\n" + strSymbolicRules(policy.outbound) } -func strSymbolicRules(rules []*SymbolicRule) string { +func strSymbolicRules(rules []*symbolicRule) string { resStr := make([]string, len(rules)) for i, rule := range rules { resStr[i] = fmt.Sprintf("\tcategory: %v action: %v paths: %v", rule.origRuleCategory, rule.origRule.Action, diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go index 02a49d5..75c2696 100644 --- a/pkg/synthesis/synthesis.go +++ b/pkg/synthesis/synthesis.go @@ -15,14 +15,14 @@ func NSXSynthesis(recourses *collector.ResourcesContainerModel, params model.Out return "", err } config := parser.GetConfig() - symbolicRules := symbolicRules{} - symbolicRules.inbound, symbolicRules.outbound = preProcessing(config.Fw.CategoriesSpecs) - fmt.Println(symbolicRules.string()) + policy := symbolicPolicy{} + policy.inbound, policy.outbound = preProcessing(config.Fw.CategoriesSpecs) + fmt.Println(policy.string()) return "", nil } -// preProcessing: convert rules from spec to symbolicRules struct -func preProcessing(categoriesSpecs []*dfw.CategorySpec) (inbound, outbound []*SymbolicRule) { +// preProcessing: convert policy from spec to symbolicPolicy struct +func preProcessing(categoriesSpecs []*dfw.CategorySpec) (inbound, outbound []*symbolicRule) { for _, category := range categoriesSpecs { if len(category.ProcessedRules.Outbound)+len(category.ProcessedRules.Inbound) == 0 { continue @@ -33,11 +33,11 @@ func preProcessing(categoriesSpecs []*dfw.CategorySpec) (inbound, outbound []*Sy return 
inbound, outbound } -func convertRulesToSymbolicPaths(rules []*dfw.FwRule, category dfw.DfwCategory) []*SymbolicRule { - res := make([]*SymbolicRule, len(rules)) +func convertRulesToSymbolicPaths(rules []*dfw.FwRule, category dfw.DfwCategory) []*symbolicRule { + res := make([]*symbolicRule, len(rules)) for i, rule := range rules { ruleSymbolicPaths := symbolicexpr.ConvertFWRuleToSymbolicPaths(rule) - res[i] = &SymbolicRule{origRule: rule, origRuleCategory: category, origSymbolicPaths: ruleSymbolicPaths} + res[i] = &symbolicRule{origRule: rule, origRuleCategory: category, origSymbolicPaths: ruleSymbolicPaths} } return res } From a8380bbb58aeaa846812f2385d709fedcf10903b Mon Sep 17 00:00:00 2001 From: shirim Date: Thu, 19 Dec 2024 08:35:42 +0200 Subject: [PATCH 20/76] minor reorg --- pkg/synthesis/synthesis.go | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go index 75c2696..ca7ae62 100644 --- a/pkg/synthesis/synthesis.go +++ b/pkg/synthesis/synthesis.go @@ -15,22 +15,24 @@ func NSXSynthesis(recourses *collector.ResourcesContainerModel, params model.Out return "", err } config := parser.GetConfig() - policy := symbolicPolicy{} - policy.inbound, policy.outbound = preProcessing(config.Fw.CategoriesSpecs) + policy := preProcessing(config.Fw.CategoriesSpecs) fmt.Println(policy.string()) return "", nil } // preProcessing: convert policy from spec to symbolicPolicy struct -func preProcessing(categoriesSpecs []*dfw.CategorySpec) (inbound, outbound []*symbolicRule) { +func preProcessing(categoriesSpecs []*dfw.CategorySpec) (policy symbolicPolicy) { + policy = symbolicPolicy{} for _, category := range categoriesSpecs { if len(category.ProcessedRules.Outbound)+len(category.ProcessedRules.Inbound) == 0 { continue } - inbound = append(inbound, convertRulesToSymbolicPaths(category.ProcessedRules.Inbound, category.Category)...) - outbound = append(outbound, convertRulesToSymbolicPaths(category.ProcessedRules.Outbound, category.Category)...) + policy.inbound = append(policy.inbound, convertRulesToSymbolicPaths(category.ProcessedRules.Inbound, + category.Category)...) + policy.outbound = append(policy.outbound, convertRulesToSymbolicPaths(category.ProcessedRules.Outbound, + category.Category)...) 
} - return inbound, outbound + return policy } func convertRulesToSymbolicPaths(rules []*dfw.FwRule, category dfw.DfwCategory) []*symbolicRule { From 33455b814bc0b593630f5b65ca74d8b4683b2486 Mon Sep 17 00:00:00 2001 From: shirim Date: Thu, 19 Dec 2024 09:00:02 +0200 Subject: [PATCH 21/76] test preProcessing --- pkg/synthesis/synthesis_test.go | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index 0030599..42377c9 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -23,20 +23,21 @@ var allTests = []synthesisTest{ }, } -func (a *synthesisTest) run(t *testing.T) { - params := model.OutputParameters{ - Format: "txt", - } +func (a *synthesisTest) runPreprocessing(t *testing.T) { rc := data.ExamplesGeneration(a.exData) - res, err := NSXSynthesis(rc, params) + parser := model.NewNSXConfigParserFromResourcesContainer(rc) + err := parser.RunParser() require.Nil(t, err) - fmt.Println(res) + config := parser.GetConfig() + policy := preProcessing(config.Fw.CategoriesSpecs) + fmt.Println(policy.string()) + // todo: test via comparing output to files in a separate PR (issue with window in analyzer tests) } -func TestSynthesis(t *testing.T) { +func TestPreprocessing(t *testing.T) { logging.Init(logging.HighVerbosity) for i := range allTests { test := &allTests[i] - test.run(t) + test.runPreprocessing(t) } } From 91286b5e13dfe3b7d0a7b8061732bd5de333a833 Mon Sep 17 00:00:00 2001 From: shirim Date: Thu, 19 Dec 2024 09:08:01 +0200 Subject: [PATCH 22/76] moved redundant code --- pkg/model/dfw/rule.go | 5 ----- 1 file changed, 5 deletions(-) diff --git a/pkg/model/dfw/rule.go b/pkg/model/dfw/rule.go index 832389c..34ea855 100644 --- a/pkg/model/dfw/rule.go +++ b/pkg/model/dfw/rule.go @@ -316,8 +316,3 @@ func (f *FwRule) originalRuleStr() string { common.Reset, ) } - -// ComputeSymbolic computes symbolicSrc and symbolicDst -func (f *FwRule) ComputeSymbolic() { - -} From 266ebf12b51367f24647f4988d068cc11b5412e3 Mon Sep 17 00:00:00 2001 From: shirim Date: Thu, 19 Dec 2024 09:32:09 +0200 Subject: [PATCH 23/76] temp WA --- pkg/synthesis/synthesis_test.go | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index 42377c9..d80c461 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -17,10 +17,11 @@ type synthesisTest struct { } var allTests = []synthesisTest{ - { - name: "ExampleDumb", - exData: data.ExampleDumb, - }, + // todo tmp comment due to unclear issue in git tests + // { + // name: "ExampleDumb", + // exData: data.ExampleDumb, + // }, } func (a *synthesisTest) runPreprocessing(t *testing.T) { From 317c27b74b9323a2e1f524e203c7b4b6d1e3144f Mon Sep 17 00:00:00 2001 From: shirim Date: Thu, 19 Dec 2024 09:47:34 +0200 Subject: [PATCH 24/76] fixed code so that tests will work after the changes --- pkg/symbolicexpr/atomic.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkg/symbolicexpr/atomic.go b/pkg/symbolicexpr/atomic.go index 3ca94d0..7b3eec1 100644 --- a/pkg/symbolicexpr/atomic.go +++ b/pkg/symbolicexpr/atomic.go @@ -20,6 +20,8 @@ func (term atomicTerm) string() string { labelType = "tag " + term.property.Name() case *collector.Group: labelType = "group " + default: // for structs used for testing + labelType = term.property.Name() } return labelType + equalSign + term.toVal } From 509fe93d69ad3d5fbde44db44f6113b8d1e784d8 Mon Sep 17 00:00:00 2001 From: 
shirim Date: Thu, 19 Dec 2024 09:54:09 +0200 Subject: [PATCH 25/76] the issue seems to be in the PR pipe and not in my code --- pkg/synthesis/synthesis_test.go | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index d80c461..42377c9 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -17,11 +17,10 @@ type synthesisTest struct { } var allTests = []synthesisTest{ - // todo tmp comment due to unclear issue in git tests - // { - // name: "ExampleDumb", - // exData: data.ExampleDumb, - // }, + { + name: "ExampleDumb", + exData: data.ExampleDumb, + }, } func (a *synthesisTest) runPreprocessing(t *testing.T) { From c48a6fb4498b1ca8c90ba1dd624bbf67dd2da078 Mon Sep 17 00:00:00 2001 From: shirim Date: Thu, 19 Dec 2024 09:57:46 +0200 Subject: [PATCH 26/76] update wrt latest changes --- pkg/synthesis/synthesis_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index 42377c9..1a79a4e 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -19,7 +19,7 @@ type synthesisTest struct { var allTests = []synthesisTest{ { name: "ExampleDumb", - exData: data.ExampleDumb, + exData: data.ExampleDumbeldore, }, } From 61543c3bef0fa44a11f57fdbc1ae97a8d091ee9f Mon Sep 17 00:00:00 2001 From: shirim Date: Thu, 19 Dec 2024 10:02:14 +0200 Subject: [PATCH 27/76] lint --- pkg/symbolicexpr/symbolicPath.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index 117ca82..058dd9c 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -100,19 +100,19 @@ func computeAllowGivenAllowHigherDeny(allowPath, denyPath SymbolicPath) *Symboli // ConvertFWRuleToSymbolicPaths given a rule, converts its src, dst and conn to SymbolicPaths func ConvertFWRuleToSymbolicPaths(rule *dfw.FwRule) *SymbolicPaths { resSymbolicPaths := SymbolicPaths{} - any := Conjunction{tautology{}} + tarmAny := Conjunction{tautology{}} srcTerms := getAtomicTermsForGroups(rule.SrcGroups) dstTerms := getAtomicTermsForGroups(rule.DstGroups) switch { case rule.IsAllSrcGroups && rule.IsAllDstGroups: - resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{any, any}) + resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{tarmAny, tarmAny}) case rule.IsAllSrcGroups: for _, dstTerm := range dstTerms { - resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{any, Conjunction{dstTerm}}) + resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{tarmAny, Conjunction{dstTerm}}) } case rule.IsAllDstGroups: for _, srcTerm := range srcTerms { - resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Conjunction{srcTerm}, any}) + resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Conjunction{srcTerm}, tarmAny}) } default: for _, srcTerm := range srcTerms { From 51dca543386f45c58880753ecbbc8dbb25a138eb Mon Sep 17 00:00:00 2001 From: shirim Date: Thu, 19 Dec 2024 10:07:32 +0200 Subject: [PATCH 28/76] lint --- pkg/synthesis/synthesis.go | 1 + 1 file changed, 1 insertion(+) diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go index ca7ae62..2f00486 100644 --- a/pkg/synthesis/synthesis.go +++ b/pkg/synthesis/synthesis.go @@ -2,6 +2,7 @@ package synthesis import ( "fmt" + "github.com/np-guard/vmware-analyzer/pkg/collector" "github.com/np-guard/vmware-analyzer/pkg/model" 
"github.com/np-guard/vmware-analyzer/pkg/model/dfw" From 05007f9759d96442436c9963d9cd47c2636dd6a1 Mon Sep 17 00:00:00 2001 From: shirim Date: Thu, 19 Dec 2024 12:11:19 +0200 Subject: [PATCH 29/76] temp commit --- pkg/synthesis/synthesis_test.go | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index 1a79a4e..dc1e16a 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -2,6 +2,8 @@ package synthesis import ( "fmt" + "os" + "path/filepath" "testing" "github.com/stretchr/testify/require" @@ -11,6 +13,13 @@ import ( "github.com/np-guard/vmware-analyzer/pkg/model" ) +// todo... +const ( + examplesDir = "examples/" + synthesisDir = "input/" + outDir = "out/" +) + type synthesisTest struct { name string exData data.Example @@ -41,3 +50,9 @@ func TestPreprocessing(t *testing.T) { test.runPreprocessing(t) } } + +// getTestsDirOut returns the path to the dir where test output files are located +func getTestsDirOut(testDir string) string { + currentDir, _ := os.Getwd() + return filepath.Join(currentDir, examplesDir+outDir+testDir) +} From 4416ed36574f8a2d382adb47d96e55d84c005296 Mon Sep 17 00:00:00 2001 From: shirim Date: Thu, 19 Dec 2024 13:02:09 +0200 Subject: [PATCH 30/76] added explaination --- pkg/symbolicexpr/atomic.go | 1 + pkg/synthesis/model.go | 11 ++++++----- pkg/synthesis/synthesis_test.go | 2 +- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/pkg/symbolicexpr/atomic.go b/pkg/symbolicexpr/atomic.go index 7b3eec1..f119f10 100644 --- a/pkg/symbolicexpr/atomic.go +++ b/pkg/symbolicexpr/atomic.go @@ -18,6 +18,7 @@ func (term atomicTerm) string() string { labelType = "virtual machine " case *collector.Tag: labelType = "tag " + term.property.Name() + // includes atomic NSX groups; e.g., groups defined over other entities (such as tags) are not included case *collector.Group: labelType = "group " default: // for structs used for testing diff --git a/pkg/synthesis/model.go b/pkg/synthesis/model.go index 8468b14..27db3d1 100644 --- a/pkg/synthesis/model.go +++ b/pkg/synthesis/model.go @@ -14,11 +14,12 @@ import ( // //nolint:all // todo: tmp for defs without implementation type AbstractModelSyn struct { - segments Segments - tags Tags // todo: should be computed by the collector or here? 
- vms VMs - atomics symbolicexpr.Atomics // todo: should be used and maintained by FwRule - policy []*symbolicPolicy // with default deny + vms VMs + // groupsToVms includes atomic NSX groups; e.g., groups defined over other entities (such as tags) are not included + groupsToVms map[*collector.Group]VMs // todo compute + tagsToVms map[*collector.Tag]VMs // todo compute + // todo: add similar maps to OS, hostname + policy []*symbolicPolicy // with default deny } // Tags map from tag's name to the tag diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index 1a79a4e..aa9da25 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -18,7 +18,7 @@ type synthesisTest struct { var allTests = []synthesisTest{ { - name: "ExampleDumb", + name: "ExampleDumbeldore", exData: data.ExampleDumbeldore, }, } From 62070463d3d6441c8217e3a2c61ff7b530d0bfee Mon Sep 17 00:00:00 2001 From: shirim Date: Thu, 19 Dec 2024 14:35:38 +0200 Subject: [PATCH 31/76] synthesis test - compare to file --- pkg/synthesis/synthesis_test.go | 25 +++++++++++++------ .../ExampleDumbeldore.txt | 13 ++++++++++ 2 files changed, 30 insertions(+), 8 deletions(-) create mode 100644 pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index 7dff6b6..0df53cc 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -4,6 +4,7 @@ import ( "fmt" "os" "path/filepath" + "strings" "testing" "github.com/stretchr/testify/require" @@ -15,9 +16,8 @@ import ( // todo... const ( - examplesDir = "examples/" - synthesisDir = "input/" - outDir = "out/" + expectedOutput = "tests_expected_output/" + carriageReturn = "\r" ) type synthesisTest struct { @@ -35,12 +35,16 @@ var allTests = []synthesisTest{ func (a *synthesisTest) runPreprocessing(t *testing.T) { rc := data.ExamplesGeneration(a.exData) parser := model.NewNSXConfigParserFromResourcesContainer(rc) - err := parser.RunParser() - require.Nil(t, err) + err1 := parser.RunParser() + require.Nil(t, err1) config := parser.GetConfig() policy := preProcessing(config.Fw.CategoriesSpecs) fmt.Println(policy.string()) - // todo: test via comparing output to files in a separate PR (issue with window in analyzer tests) + expectedOutputFileName := filepath.Join(getTestsDirOut(), a.name+".txt") + expectedOutput, err2 := os.ReadFile(expectedOutputFileName) + require.Nil(t, err2) + expectedOutputStr := string(expectedOutput) + require.Equal(t, cleanStr(policy.string()), cleanStr(expectedOutputStr), "output not as expected") } func TestPreprocessing(t *testing.T) { @@ -52,7 +56,12 @@ func TestPreprocessing(t *testing.T) { } // getTestsDirOut returns the path to the dir where test output files are located -func getTestsDirOut(testDir string) string { +func getTestsDirOut() string { currentDir, _ := os.Getwd() - return filepath.Join(currentDir, examplesDir+outDir+testDir) + return filepath.Join(currentDir, expectedOutput) +} + +// comparison should be insensitive to line comparators; cleaning strings from line comparators +func cleanStr(str string) string { + return strings.ReplaceAll(strings.ReplaceAll(str, "/n", ""), carriageReturn, "") } diff --git a/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt b/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt new file mode 100644 index 0000000..865373e --- /dev/null +++ b/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt @@ -0,0 +1,13 @@ + +symbolicInbound Rules: 
+~~~~~~~~~~~~~~~~~~~~~~~ + category: 4 action: allow paths: (group = DumbledoreAll) to (*) + category: 4 action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) + category: 4 action: allow paths: (group = DumbledoreNoSly) to (*) + category: 4 action: deny paths: (*) to (*) +symbolicOutbound Rules: +~~~~~~~~~~~~~~~~~~~~~~~~~ + category: 4 action: allow paths: (group = DumbledoreAll) to (*) + category: 4 action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) + category: 4 action: allow paths: (group = DumbledoreNoSly) to (*) + category: 4 action: deny paths: (*) to (*) \ No newline at end of file From 98d0f6c5063d96a9c2b8adba4dd77c086a062a79 Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 09:28:55 +0200 Subject: [PATCH 32/76] improve documentation --- pkg/synthesis/model.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkg/synthesis/model.go b/pkg/synthesis/model.go index 27db3d1..f0f16b0 100644 --- a/pkg/synthesis/model.go +++ b/pkg/synthesis/model.go @@ -15,9 +15,9 @@ import ( //nolint:all // todo: tmp for defs without implementation type AbstractModelSyn struct { vms VMs - // groupsToVms includes atomic NSX groups; e.g., groups defined over other entities (such as tags) are not included - groupsToVms map[*collector.Group]VMs // todo compute - tagsToVms map[*collector.Tag]VMs // todo compute + // epToGroups includes atomic NSX groups; e.g., groups defined over other entities (such as tags) are not included + epToGroups map[*endpoints.VM]collector.Group // todo compute + epToTags map[*endpoints.VM]Tags // todo compute // todo: add similar maps to OS, hostname policy []*symbolicPolicy // with default deny } From 9dd9bfe9d9ab733a5dabe3a8290602d19c806e5c Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 09:47:53 +0200 Subject: [PATCH 33/76] added connection to SymbolicPath; for now initializes with all connections --- pkg/symbolicexpr/model.go | 8 +++++--- pkg/symbolicexpr/symbolicPath.go | 26 +++++++++++++++++--------- pkg/symbolicexpr/symbolicexpr_test.go | 27 ++++++++++++++------------- 3 files changed, 36 insertions(+), 25 deletions(-) diff --git a/pkg/symbolicexpr/model.go b/pkg/symbolicexpr/model.go index 9e1767d..d4baa12 100644 --- a/pkg/symbolicexpr/model.go +++ b/pkg/symbolicexpr/model.go @@ -1,5 +1,7 @@ package symbolicexpr +import "github.com/np-guard/models/pkg/netset" + // the package implements a symbolic expression of enabled paths from symbolic src to symbolic dst, expressed as CNF // Virtual machines' properties used in atomic group expr, e.g. 
group = Gryffindor, tag = "backend" @@ -37,9 +39,9 @@ type Conjunction []atomic // SymbolicPath all path from a Src VM satisfying Src to Dst VM satisfying Dst type SymbolicPath struct { - Src Conjunction - Dst Conjunction - // ToDo: add Conn + Src Conjunction + Dst Conjunction + Conn *netset.TransportSet } type SymbolicPaths []*SymbolicPath diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index 058dd9c..729bd1b 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -2,6 +2,7 @@ package symbolicexpr import ( "fmt" + "github.com/np-guard/models/pkg/netset" "strings" "github.com/np-guard/vmware-analyzer/pkg/collector" @@ -78,9 +79,11 @@ func computeAllowGivenAllowHigherDeny(allowPath, denyPath SymbolicPath) *Symboli if !srcAtom.isTautology() { srcAtomNegate := srcAtom.negate().(atomicTerm) if allowPath.Src.isTautology() { - resAllowPaths = append(resAllowPaths, &SymbolicPath{Conjunction{&srcAtomNegate}, allowPath.Dst}) + resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: Conjunction{&srcAtomNegate}, Dst: allowPath.Dst, + Conn: netset.AllTransports()}) } else { - resAllowPaths = append(resAllowPaths, &SymbolicPath{*allowPath.Src.copy().add(&srcAtomNegate), allowPath.Dst}) + resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: *allowPath.Src.copy().add(&srcAtomNegate), + Dst: allowPath.Dst, Conn: netset.AllTransports()}) } } } @@ -88,16 +91,18 @@ func computeAllowGivenAllowHigherDeny(allowPath, denyPath SymbolicPath) *Symboli if !dstAtom.isTautology() { dstAtomNegate := dstAtom.negate().(atomicTerm) if allowPath.Dst.isTautology() { - resAllowPaths = append(resAllowPaths, &SymbolicPath{allowPath.Src, Conjunction{&dstAtomNegate}}) + resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: allowPath.Src, Dst: Conjunction{&dstAtomNegate}, + Conn: netset.AllTransports()}) } else { - resAllowPaths = append(resAllowPaths, &SymbolicPath{allowPath.Src, *allowPath.Dst.copy().add(&dstAtomNegate)}) + resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: allowPath.Src, Dst: *allowPath.Dst.copy().add(&dstAtomNegate), + Conn: netset.AllTransports()}) } } } return &resAllowPaths } -// ConvertFWRuleToSymbolicPaths given a rule, converts its src, dst and conn to SymbolicPaths +// ConvertFWRuleToSymbolicPaths given a rule, converts its src, dst and Conn to SymbolicPaths func ConvertFWRuleToSymbolicPaths(rule *dfw.FwRule) *SymbolicPaths { resSymbolicPaths := SymbolicPaths{} tarmAny := Conjunction{tautology{}} @@ -105,19 +110,22 @@ func ConvertFWRuleToSymbolicPaths(rule *dfw.FwRule) *SymbolicPaths { dstTerms := getAtomicTermsForGroups(rule.DstGroups) switch { case rule.IsAllSrcGroups && rule.IsAllDstGroups: - resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{tarmAny, tarmAny}) + resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Src: tarmAny, Dst: tarmAny, Conn: netset.AllTransports()}) case rule.IsAllSrcGroups: for _, dstTerm := range dstTerms { - resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{tarmAny, Conjunction{dstTerm}}) + resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Src: tarmAny, Dst: Conjunction{dstTerm}, + Conn: netset.AllTransports()}) } case rule.IsAllDstGroups: for _, srcTerm := range srcTerms { - resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Conjunction{srcTerm}, tarmAny}) + resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Src: Conjunction{srcTerm}, Dst: tarmAny, + Conn: netset.AllTransports()}) } default: for _, srcTerm := range srcTerms { for _, dstTerm := range 
dstTerms { - resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Conjunction{srcTerm}, Conjunction{dstTerm}}) + resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Src: Conjunction{srcTerm}, + Dst: Conjunction{dstTerm}, Conn: netset.AllTransports()}) } } } diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index ac1537a..660b857 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -2,6 +2,7 @@ package symbolicexpr import ( "fmt" + "github.com/np-guard/models/pkg/netset" "testing" "github.com/stretchr/testify/require" @@ -28,7 +29,7 @@ func TestSymbolicPaths(t *testing.T) { negateAtomic := atomic.negate().(atomicTerm) conjDst = *conjDst.add(&negateAtomic) } - conjSymbolicPath := SymbolicPath{conjSrc, conjDst} + conjSymbolicPath := SymbolicPath{Src: conjSrc, Dst: conjDst, Conn: netset.AllTransports()} fmt.Printf("\nconjSymbolicPath:\n%v\n", conjSymbolicPath.string()) require.Equal(t, "(t1 = str1 and t2 = str2 and t3 = str3) to (t1 != str1 and t2 != str2 and t3 != str3)", conjSymbolicPath.string(), "conjSymbolicPath not as expected") @@ -59,8 +60,8 @@ func TestComputeAllowGivenDenySingleTermEach(t *testing.T) { testDst2 := initTestTag("d2") atomicDst2 := &atomicTerm{property: testDst2, toVal: "str2"} conjDst2 = *conjDst2.add(atomicDst2) - allowPath := SymbolicPath{conjSrc1, conjDst1} - denyPath := SymbolicPath{conjSrc2, conjDst2} + allowPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.AllTransports()} + denyPath := SymbolicPath{Src: conjSrc2, Dst: conjDst2, Conn: netset.AllTransports()} fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) @@ -90,8 +91,8 @@ func TestComputeAllowGivenDenyThreeTermsEach(t *testing.T) { atomicDeny := &atomicTerm{property: testDeny, toVal: fmt.Sprintf("str%v`", i)} conjDeny = *conjDeny.add(atomicDeny) } - allowPath := SymbolicPath{conjAllow, conjAllow} - denyPath := SymbolicPath{conjDeny, conjDeny} + allowPath := SymbolicPath{Src: conjAllow, Dst: conjAllow, Conn: netset.AllTransports()} + denyPath := SymbolicPath{Src: conjDeny, Dst: conjDeny, Conn: netset.AllTransports()} fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) @@ -125,8 +126,8 @@ func TestComputeAllowGivenDenyAllowTautology(t *testing.T) { conjDeny = *conjDeny.add(atomicDeny) } tautologyConj := Conjunction{tautology{}} - allowPath := SymbolicPath{tautologyConj, tautologyConj} - denyPath := SymbolicPath{conjDeny, conjDeny} + allowPath := SymbolicPath{Src: tautologyConj, Dst: tautologyConj, Conn: netset.AllTransports()} + denyPath := SymbolicPath{Src: conjDeny, Dst: conjDeny, Conn: netset.AllTransports()} fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) @@ -151,8 +152,8 @@ func TestComputeAllowGivenDenyDenyTautology(t *testing.T) { } fmt.Printf("conjAllow is %v\nisEmptySet%v\n\n", conjAllow.string(), conjAllow.isEmptySet()) tautologyConj := 
Conjunction{tautology{}} - allowPath := SymbolicPath{conjAllow, conjAllow} - denyPath := SymbolicPath{tautologyConj, tautologyConj} + allowPath := SymbolicPath{Src: conjAllow, Dst: conjAllow, Conn: netset.AllTransports()} + denyPath := SymbolicPath{Src: tautologyConj, Dst: tautologyConj, Conn: netset.AllTransports()} fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) @@ -195,13 +196,13 @@ func TestComputeAllowGivenDenies(t *testing.T) { atomicAllowDst := &atomicTerm{property: testTag, toVal: fmt.Sprintf("t%v", 2*i+1)} conjAllowSrc := Conjunction{atomicAllowSrc} conjAllowDst := Conjunction{atomicAllowDst} - allowPaths = append(allowPaths, &SymbolicPath{conjAllowSrc, conjAllowDst}) + allowPaths = append(allowPaths, &SymbolicPath{Src: conjAllowSrc, Dst: conjAllowDst, Conn: netset.AllTransports()}) } atomicDenySrc := &atomicTerm{property: testSegment, toVal: fmt.Sprintf("s%v", 2*i)} atomicDenyDst := &atomicTerm{property: testSegment, toVal: fmt.Sprintf("s%v", 2*i+1)} conjDenySrc := Conjunction{atomicDenySrc} conjDenyDst := Conjunction{atomicDenyDst} - denyPaths = append(denyPaths, &SymbolicPath{conjDenySrc, conjDenyDst}) + denyPaths = append(denyPaths, &SymbolicPath{Src: conjDenySrc, Dst: conjDenyDst, Conn: netset.AllTransports()}) } fmt.Printf("allowPaths:\n%v\ndenyPaths:\n%v\n", allowPaths.String(), denyPaths.String()) res := ComputeAllowGivenDenies(&allowPaths, &denyPaths) @@ -240,8 +241,8 @@ func TestAllowDenyOptimizeEmptyPath(t *testing.T) { testDst1 := initTestTag("d1") atomicDst1 := &atomicTerm{property: testDst1, toVal: "str1"} conjDst1 = *conjDst1.add(atomicDst1) - allowPath := SymbolicPath{conjSrc1, Conjunction{tautology{}}} - denyPath := SymbolicPath{conjSrc1, conjDst1} + allowPath := SymbolicPath{Src: conjSrc1, Dst: Conjunction{tautology{}}} + denyPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1} allowWithDeny := ComputeAllowGivenDenies(&SymbolicPaths{&allowPath}, &SymbolicPaths{&denyPath}) fmt.Printf("allow path: %v with higher priority deny path:%v is:\n%v\n\n", allowPath.string(), denyPath.string(), allowWithDeny.String()) From 9f4ef5b79be5a263cf1b05277c0f4cfe4ce56779 Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 10:04:37 +0200 Subject: [PATCH 34/76] update unit tests --- pkg/symbolicexpr/symbolicPath.go | 2 +- pkg/symbolicexpr/symbolicexpr_test.go | 65 +++++++++++++++------------ 2 files changed, 38 insertions(+), 29 deletions(-) diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index 729bd1b..6ed25a9 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -10,7 +10,7 @@ import ( ) func (path *SymbolicPath) string() string { - return path.Src.string() + " to " + path.Dst.string() + return path.Src.string() + " to " + path.Dst.string() + " " + path.Conn.String() } // if the source or destination is empty then so is the entire path diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index 660b857..db96f3f 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -31,7 +31,8 @@ func TestSymbolicPaths(t *testing.T) { } conjSymbolicPath := SymbolicPath{Src: conjSrc, Dst: conjDst, Conn: netset.AllTransports()} fmt.Printf("\nconjSymbolicPath:\n%v\n", conjSymbolicPath.string()) - require.Equal(t, "(t1 = str1 and t2 
= str2 and t3 = str3) to (t1 != str1 and t2 != str2 and t3 != str3)", + require.Equal(t, "(t1 = str1 and t2 = str2 and t3 = str3) to (t1 != str1 and t2 != str2 and t3 != str3)"+ + " All Connections", conjSymbolicPath.string(), "conjSymbolicPath not as expected") println("conjEmpty", conjEmpty.string()) require.Equal(t, emptySet, conjEmpty.string(), "empty conjunction not as expected") @@ -65,7 +66,8 @@ func TestComputeAllowGivenDenySingleTermEach(t *testing.T) { fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) - require.Equal(t, "(s1 = str1 and s2 != str2) to (d1 = str1)\n(s1 = str1) to (d1 = str1 and d2 != str2)", + require.Equal(t, "(s1 = str1 and s2 != str2) to (d1 = str1) All Connections\n"+ + "(s1 = str1) to (d1 = str1 and d2 != str2) All Connections", allowGivenDeny.String(), "allowGivenDeny single term computation not as expected") } @@ -97,12 +99,18 @@ func TestComputeAllowGivenDenyThreeTermsEach(t *testing.T) { allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) require.Equal(t, - "(s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`) to (s1 = str1 and s2 = str2 and s3 = str3)\n"+ - "(s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`) to (s1 = str1 and s2 = str2 and s3 = str3)\n"+ - "(s1 = str1 and s2 = str2 and s3 = str3 and s3` != str3`) to (s1 = str1 and s2 = str2 and s3 = str3)\n"+ - "(s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`)\n"+ - "(s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`)\n"+ - "(s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s3` != str3`)", + "(s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`) to (s1 = str1 and s2 = str2 and s3 = str3) "+ + "All Connections\n"+ + "(s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`) to (s1 = str1 and s2 = str2 and s3 = str3)"+ + " All Connections\n"+ + "(s1 = str1 and s2 = str2 and s3 = str3 and s3` != str3`) to (s1 = str1 and s2 = str2 and s3 = str3)"+ + " All Connections\n"+ + "(s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`)"+ + " All Connections\n"+ + "(s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`)"+ + " All Connections\n"+ + "(s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s3` != str3`)"+ + " All Connections", allowGivenDeny.String(), "allowGivenDeny three terms computation not as expected") } @@ -132,8 +140,9 @@ func TestComputeAllowGivenDenyAllowTautology(t *testing.T) { allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) require.Equal(t, - "(s1` != str1`) to (*)\n(s2` != str2`) to (*)\n(s3` != str3`) to (*)\n(*) to (s1` != str1`)\n"+ - "(*) to (s2` != str2`)\n(*) to (s3` != str3`)", allowGivenDeny.String(), + "(s1` != str1`) to (*) All Connections\n(s2` != str2`) to (*) All Connections\n"+ + "(s3` != str3`) to (*) All Connections\n(*) to (s1` != str1`) All Connections\n"+ + "(*) to (s2` != str2`) All Connections\n(*) to (s3` != str3`) All Connections", 
allowGivenDeny.String(), "allowGivenDeny allow tautology computation not as expected") } @@ -207,22 +216,22 @@ func TestComputeAllowGivenDenies(t *testing.T) { fmt.Printf("allowPaths:\n%v\ndenyPaths:\n%v\n", allowPaths.String(), denyPaths.String()) res := ComputeAllowGivenDenies(&allowPaths, &denyPaths) fmt.Printf("ComputeAllowGivenDenies:\n%v\n", res.String()) - require.Equal(t, "(tag = t0 and segment != s0 and segment != s2 and segment != s4) to (tag = t1)\n"+ - "(tag = t0 and segment != s0 and segment != s2) to (tag = t1 and segment != s5)\n"+ - "(tag = t0 and segment != s0 and segment != s4) to (tag = t1 and segment != s3)\n"+ - "(tag = t0 and segment != s0) to (tag = t1 and segment != s3 and segment != s5)\n"+ - "(tag = t0 and segment != s2 and segment != s4) to (tag = t1 and segment != s1)\n"+ - "(tag = t0 and segment != s2) to (tag = t1 and segment != s1 and segment != s5)\n"+ - "(tag = t0 and segment != s4) to (tag = t1 and segment != s1 and segment != s3)\n"+ - "(tag = t0) to (tag = t1 and segment != s1 and segment != s3 and segment != s5)\n"+ - "(tag = t2 and segment != s0 and segment != s2 and segment != s4) to (tag = t3)\n"+ - "(tag = t2 and segment != s0 and segment != s2) to (tag = t3 and segment != s5)\n"+ - "(tag = t2 and segment != s0 and segment != s4) to (tag = t3 and segment != s3)\n"+ - "(tag = t2 and segment != s0) to (tag = t3 and segment != s3 and segment != s5)\n"+ - "(tag = t2 and segment != s2 and segment != s4) to (tag = t3 and segment != s1)\n"+ - "(tag = t2 and segment != s2) to (tag = t3 and segment != s1 and segment != s5)\n"+ - "(tag = t2 and segment != s4) to (tag = t3 and segment != s1 and segment != s3)\n"+ - "(tag = t2) to (tag = t3 and segment != s1 and segment != s3 and segment != s5)", + require.Equal(t, "(tag = t0 and segment != s0 and segment != s2 and segment != s4) to (tag = t1) All Connections\n"+ + "(tag = t0 and segment != s0 and segment != s2) to (tag = t1 and segment != s5) All Connections\n"+ + "(tag = t0 and segment != s0 and segment != s4) to (tag = t1 and segment != s3) All Connections\n"+ + "(tag = t0 and segment != s0) to (tag = t1 and segment != s3 and segment != s5) All Connections\n"+ + "(tag = t0 and segment != s2 and segment != s4) to (tag = t1 and segment != s1) All Connections\n"+ + "(tag = t0 and segment != s2) to (tag = t1 and segment != s1 and segment != s5) All Connections\n"+ + "(tag = t0 and segment != s4) to (tag = t1 and segment != s1 and segment != s3) All Connections\n"+ + "(tag = t0) to (tag = t1 and segment != s1 and segment != s3 and segment != s5) All Connections\n"+ + "(tag = t2 and segment != s0 and segment != s2 and segment != s4) to (tag = t3) All Connections\n"+ + "(tag = t2 and segment != s0 and segment != s2) to (tag = t3 and segment != s5) All Connections\n"+ + "(tag = t2 and segment != s0 and segment != s4) to (tag = t3 and segment != s3) All Connections\n"+ + "(tag = t2 and segment != s0) to (tag = t3 and segment != s3 and segment != s5) All Connections\n"+ + "(tag = t2 and segment != s2 and segment != s4) to (tag = t3 and segment != s1) All Connections\n"+ + "(tag = t2 and segment != s2) to (tag = t3 and segment != s1 and segment != s5) All Connections\n"+ + "(tag = t2 and segment != s4) to (tag = t3 and segment != s1 and segment != s3) All Connections\n"+ + "(tag = t2) to (tag = t3 and segment != s1 and segment != s3 and segment != s5) All Connections", ComputeAllowGivenDenies(&allowPaths, &denyPaths).String(), "ComputeAllowGivenDenies computation not as expected") } @@ -241,8 +250,8 @@ func 
TestAllowDenyOptimizeEmptyPath(t *testing.T) { testDst1 := initTestTag("d1") atomicDst1 := &atomicTerm{property: testDst1, toVal: "str1"} conjDst1 = *conjDst1.add(atomicDst1) - allowPath := SymbolicPath{Src: conjSrc1, Dst: Conjunction{tautology{}}} - denyPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1} + allowPath := SymbolicPath{Src: conjSrc1, Dst: Conjunction{tautology{}}, Conn: netset.AllTransports()} + denyPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.AllTransports()} allowWithDeny := ComputeAllowGivenDenies(&SymbolicPaths{&allowPath}, &SymbolicPaths{&denyPath}) fmt.Printf("allow path: %v with higher priority deny path:%v is:\n%v\n\n", allowPath.string(), denyPath.string(), allowWithDeny.String()) From c8c2cd3a6cae2624b6cc4ae0348c3a1fedf486b6 Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 10:07:01 +0200 Subject: [PATCH 35/76] update abstract model end-to-end test --- .../tests_expected_output/ExampleDumbeldore.txt | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt b/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt index 865373e..8365003 100644 --- a/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt +++ b/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt @@ -1,13 +1,13 @@ symbolicInbound Rules: ~~~~~~~~~~~~~~~~~~~~~~~ - category: 4 action: allow paths: (group = DumbledoreAll) to (*) - category: 4 action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) - category: 4 action: allow paths: (group = DumbledoreNoSly) to (*) - category: 4 action: deny paths: (*) to (*) + category: 4 action: allow paths: (group = DumbledoreAll) to (*) All Connections + category: 4 action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) All Connections + category: 4 action: allow paths: (group = DumbledoreNoSly) to (*) All Connections + category: 4 action: deny paths: (*) to (*) All Connections symbolicOutbound Rules: ~~~~~~~~~~~~~~~~~~~~~~~~~ - category: 4 action: allow paths: (group = DumbledoreAll) to (*) - category: 4 action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) - category: 4 action: allow paths: (group = DumbledoreNoSly) to (*) - category: 4 action: deny paths: (*) to (*) \ No newline at end of file + category: 4 action: allow paths: (group = DumbledoreAll) to (*) All Connections + category: 4 action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) All Connections + category: 4 action: allow paths: (group = DumbledoreNoSly) to (*) All Connections + category: 4 action: deny paths: (*) to (*) All Connections \ No newline at end of file From 57cde7ab194619b7429bddc2c0388d0fa60ee67f Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 11:15:06 +0200 Subject: [PATCH 36/76] removed empty todo --- pkg/synthesis/synthesis_test.go | 1 - 1 file changed, 1 deletion(-) diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index 0df53cc..854d88d 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -14,7 +14,6 @@ import ( "github.com/np-guard/vmware-analyzer/pkg/model" ) -// todo... 
const ( expectedOutput = "tests_expected_output/" carriageReturn = "\r" From 491b9aa64d89f0e45af176d71407d58b2899b007 Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 11:21:41 +0200 Subject: [PATCH 37/76] move getAtomicTermsForGroups to the correct file --- pkg/symbolicexpr/atomic.go | 9 +++++++++ pkg/symbolicexpr/symbolicPath.go | 10 ---------- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/pkg/symbolicexpr/atomic.go b/pkg/symbolicexpr/atomic.go index f119f10..1c91477 100644 --- a/pkg/symbolicexpr/atomic.go +++ b/pkg/symbolicexpr/atomic.go @@ -40,6 +40,15 @@ func (atomicTerm) isTautology() bool { return false } +// todo: handling only "in group" in this stage +func getAtomicTermsForGroups(groups []*collector.Group) []*atomicTerm { + res := make([]*atomicTerm, len(groups)) + for i, group := range groups { + res[i] = &atomicTerm{property: group, toVal: *group.DisplayName, neg: false} + } + return res +} + // returns true iff otherAt is negation of // once we cache the atomic terms, we can just compare pointers func (term atomicTerm) isNegateOf(otherAt atomic) bool { diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index 058dd9c..c5963b9 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -4,7 +4,6 @@ import ( "fmt" "strings" - "github.com/np-guard/vmware-analyzer/pkg/collector" "github.com/np-guard/vmware-analyzer/pkg/model/dfw" ) @@ -123,12 +122,3 @@ func ConvertFWRuleToSymbolicPaths(rule *dfw.FwRule) *SymbolicPaths { } return &resSymbolicPaths } - -// todo: handling only "in group" in this stage -func getAtomicTermsForGroups(groups []*collector.Group) []*atomicTerm { - res := make([]*atomicTerm, len(groups)) - for i, group := range groups { - res[i] = &atomicTerm{property: group, toVal: *group.DisplayName, neg: false} - } - return res -} From 099437e6d9f09aed5a5b0faac2c1be3dce198868 Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 11:58:39 +0200 Subject: [PATCH 38/76] exporting FwRule's connection Adding connection to symbolicPath functionality --- pkg/model/dfw/category.go | 8 ++++---- pkg/model/dfw/dfw.go | 2 +- pkg/model/dfw/rule.go | 14 +++++++------- pkg/symbolicexpr/symbolicPath.go | 23 +++++++++++++---------- 4 files changed, 25 insertions(+), 22 deletions(-) diff --git a/pkg/model/dfw/category.go b/pkg/model/dfw/category.go index 3616670..39349e4 100644 --- a/pkg/model/dfw/category.go +++ b/pkg/model/dfw/category.go @@ -120,13 +120,13 @@ func (c *CategorySpec) analyzeCategory(src, dst *endpoints.VM, isIngress bool, if rule.processedRuleCapturesPair(src, dst) /*rule.capturesPair(src, dst, isIngress)*/ { switch rule.Action { case actionAllow: - addedAllowedConns := rule.conn.Subtract(deniedConns).Subtract(jumpToAppConns) + addedAllowedConns := rule.Conn.Subtract(deniedConns).Subtract(jumpToAppConns) allowedConns = allowedConns.Union(addedAllowedConns) case actionDeny: - addedDeniedConns := rule.conn.Subtract(allowedConns).Subtract(jumpToAppConns) + addedDeniedConns := rule.Conn.Subtract(allowedConns).Subtract(jumpToAppConns) deniedConns = deniedConns.Union(addedDeniedConns) case actionJumpToApp: - addedJumpToAppConns := rule.conn.Subtract(allowedConns).Subtract(deniedConns) + addedJumpToAppConns := rule.Conn.Subtract(allowedConns).Subtract(deniedConns) jumpToAppConns = jumpToAppConns.Union(addedJumpToAppConns) } } @@ -190,7 +190,7 @@ func (c *CategorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups, s IsAllSrcGroups: isAllSrcGroup, DstGroups: dstGroups, 
IsAllDstGroups: isAllDstGroup, - conn: conn, + Conn: conn, Action: actionFromString(action), direction: direction, ruleID: ruleID, diff --git a/pkg/model/dfw/dfw.go b/pkg/model/dfw/dfw.go index a076259..6c7a0b1 100644 --- a/pkg/model/dfw/dfw.go +++ b/pkg/model/dfw/dfw.go @@ -142,7 +142,7 @@ func (d *DFW) AddRule(src, dst []*endpoints.VM, srcGroups, dstGroups, scopeGroup newRule := &FwRule{ srcVMs: src, dstVMs: dst, - conn: netset.All(), // todo: change + Conn: netset.All(), // todo: change Action: actionFromString(actionStr), } categoryObj.rules = append(categoryObj.rules, newRule) diff --git a/pkg/model/dfw/rule.go b/pkg/model/dfw/rule.go index 34ea855..03b2c38 100644 --- a/pkg/model/dfw/rule.go +++ b/pkg/model/dfw/rule.go @@ -67,7 +67,7 @@ type FwRule struct { IsAllDstGroups bool // Scope implies additional condition on any Src and any Dst; will be added in one of the last stages ScopeGroups []*collector.Group - conn *netset.TransportSet + Conn *netset.TransportSet Action RuleAction direction string // "IN","OUT", "IN_OUT" origRuleObj *collector.Rule @@ -86,7 +86,7 @@ func (f *FwRule) effectiveRules() (inbound, outbound *FwRule) { logging.Debugf("rule %d has no effective inbound/outbound component, since its scope component is empty", f.ruleID) return nil, nil } - if f.conn.IsEmpty() { + if f.Conn.IsEmpty() { logging.Debugf("rule %d has no effective inbound/outbound component, since its traffic attributes are empty", f.ruleID) return nil, nil } @@ -122,7 +122,7 @@ func (f *FwRule) getInboundRule() *FwRule { IsAllSrcGroups: f.IsAllSrcGroups, IsAllDstGroups: f.IsAllDstGroups, ScopeGroups: f.ScopeGroups, - conn: f.conn, + Conn: f.Conn, Action: f.Action, direction: string(nsx.RuleDirectionIN), origRuleObj: f.origRuleObj, @@ -161,7 +161,7 @@ func (f *FwRule) getOutboundRule() *FwRule { IsAllSrcGroups: f.IsAllSrcGroups, IsAllDstGroups: f.IsAllDstGroups, ScopeGroups: f.ScopeGroups, - conn: f.conn, + Conn: f.Conn, Action: f.Action, direction: string(nsx.RuleDirectionOUT), origRuleObj: f.origRuleObj, @@ -200,12 +200,12 @@ func vmsString(vms []*endpoints.VM) string { // groups are interpreted to VM members in this representation func (f *FwRule) string() string { return fmt.Sprintf("ruleID: %d, src: %s, dst: %s, conn: %s, action: %s, direction: %s, scope: %s, sec-policy: %s", - f.ruleID, vmsString(f.srcVMs), vmsString(f.dstVMs), f.conn.String(), string(f.Action), f.direction, vmsString(f.scope), f.secPolicyName) + f.ruleID, vmsString(f.srcVMs), vmsString(f.dstVMs), f.Conn.String(), string(f.Action), f.direction, vmsString(f.scope), f.secPolicyName) } func (f *FwRule) effectiveRuleStr() string { return fmt.Sprintf("ruleID: %d, src: %s, dst: %s, conn: %s, action: %s, direction: %s, sec-policy: %s", - f.ruleID, vmsString(f.srcVMs), vmsString(f.dstVMs), f.conn.String(), string(f.Action), f.direction, f.secPolicyName) + f.ruleID, vmsString(f.srcVMs), vmsString(f.dstVMs), f.Conn.String(), string(f.Action), f.direction, f.secPolicyName) } func getDefaultRuleScope(r *collector.FirewallRule) string { @@ -253,7 +253,7 @@ func getRulesFormattedHeaderLine() string { "src", "dst", "conn", - "Action", + "action", "direction", "scope", "sec-policy", diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index bd59466..2459634 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -2,7 +2,6 @@ package symbolicexpr import ( "fmt" - "github.com/np-guard/models/pkg/netset" "strings" "github.com/np-guard/vmware-analyzer/pkg/model/dfw" @@ -14,7 +13,7 @@ func 
(path *SymbolicPath) string() string { // if the source or destination is empty then so is the entire path func (path *SymbolicPath) isEmpty() bool { - return path.Src.isEmptySet() || path.Dst.isEmptySet() + return path.Src.isEmptySet() || path.Dst.isEmptySet() || path.Conn.IsEmpty() } func (paths *SymbolicPaths) String() string { @@ -79,10 +78,10 @@ func computeAllowGivenAllowHigherDeny(allowPath, denyPath SymbolicPath) *Symboli srcAtomNegate := srcAtom.negate().(atomicTerm) if allowPath.Src.isTautology() { resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: Conjunction{&srcAtomNegate}, Dst: allowPath.Dst, - Conn: netset.AllTransports()}) + Conn: allowPath.Conn}) } else { resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: *allowPath.Src.copy().add(&srcAtomNegate), - Dst: allowPath.Dst, Conn: netset.AllTransports()}) + Dst: allowPath.Dst, Conn: allowPath.Conn}) } } } @@ -91,12 +90,16 @@ func computeAllowGivenAllowHigherDeny(allowPath, denyPath SymbolicPath) *Symboli dstAtomNegate := dstAtom.negate().(atomicTerm) if allowPath.Dst.isTautology() { resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: allowPath.Src, Dst: Conjunction{&dstAtomNegate}, - Conn: netset.AllTransports()}) + Conn: allowPath.Conn}) } else { resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: allowPath.Src, Dst: *allowPath.Dst.copy().add(&dstAtomNegate), - Conn: netset.AllTransports()}) + Conn: allowPath.Conn}) } } + if !denyPath.Conn.IsAll() { // Connection of deny path is not tautology + resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: allowPath.Src, Dst: allowPath.Dst, + Conn: allowPath.Conn.Subtract(denyPath.Conn)}) + } } return &resAllowPaths } @@ -109,22 +112,22 @@ func ConvertFWRuleToSymbolicPaths(rule *dfw.FwRule) *SymbolicPaths { dstTerms := getAtomicTermsForGroups(rule.DstGroups) switch { case rule.IsAllSrcGroups && rule.IsAllDstGroups: - resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Src: tarmAny, Dst: tarmAny, Conn: netset.AllTransports()}) + resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Src: tarmAny, Dst: tarmAny, Conn: rule.Conn}) case rule.IsAllSrcGroups: for _, dstTerm := range dstTerms { resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Src: tarmAny, Dst: Conjunction{dstTerm}, - Conn: netset.AllTransports()}) + Conn: rule.Conn}) } case rule.IsAllDstGroups: for _, srcTerm := range srcTerms { resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Src: Conjunction{srcTerm}, Dst: tarmAny, - Conn: netset.AllTransports()}) + Conn: rule.Conn}) } default: for _, srcTerm := range srcTerms { for _, dstTerm := range dstTerms { resSymbolicPaths = append(resSymbolicPaths, &SymbolicPath{Src: Conjunction{srcTerm}, - Dst: Conjunction{dstTerm}, Conn: netset.AllTransports()}) + Dst: Conjunction{dstTerm}, Conn: rule.Conn}) } } } From aabf5ea70c556a8ef836c5fb183067168b309a11 Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 13:11:18 +0200 Subject: [PATCH 39/76] simplified code --- pkg/symbolicexpr/conjunction.go | 13 +++++++++++ pkg/symbolicexpr/symbolicPath.go | 31 ++++++++++++++------------- pkg/symbolicexpr/symbolicexpr_test.go | 12 ++--------- 3 files changed, 31 insertions(+), 25 deletions(-) diff --git a/pkg/symbolicexpr/conjunction.go b/pkg/symbolicexpr/conjunction.go index 7bab274..10582fe 100644 --- a/pkg/symbolicexpr/conjunction.go +++ b/pkg/symbolicexpr/conjunction.go @@ -35,6 +35,19 @@ func (c *Conjunction) isTautology() bool { return false } +func (c *Conjunction) removeTautology() Conjunction { + if len(*c) <= 1 { + return *c + } + newC 
:= Conjunction{} + for _, atom := range *c { + if !atom.isTautology() { + newC = append(newC, atom) + } + } + return newC +} + // checks whether the Conjunction is empty: either syntactically, or contains an atomicTerm and its negation func (c *Conjunction) isEmptySet() bool { if len(*c) == 0 { diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index 2459634..3ead597 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -37,6 +37,17 @@ func (paths *SymbolicPaths) removeEmpty() *SymbolicPaths { return &newPaths } +func (paths *SymbolicPaths) removeTautology() *SymbolicPaths { + newPaths := SymbolicPaths{} + for _, path := range *paths { + if !path.isEmpty() { + newPath := &SymbolicPath{Src: path.Src.removeTautology(), Dst: path.Dst.removeTautology(), Conn: path.Conn} + newPaths = append(newPaths, newPath) + } + } + return &newPaths +} + // ComputeAllowGivenDenies converts a set of symbolic allow and deny paths (given as type SymbolicPaths) // the resulting allow paths in SymbolicPaths // The motivation here is to unroll allow rule given higher priority deny rule @@ -76,32 +87,22 @@ func computeAllowGivenAllowHigherDeny(allowPath, denyPath SymbolicPath) *Symboli for _, srcAtom := range denyPath.Src { if !srcAtom.isTautology() { srcAtomNegate := srcAtom.negate().(atomicTerm) - if allowPath.Src.isTautology() { - resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: Conjunction{&srcAtomNegate}, Dst: allowPath.Dst, - Conn: allowPath.Conn}) - } else { - resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: *allowPath.Src.copy().add(&srcAtomNegate), - Dst: allowPath.Dst, Conn: allowPath.Conn}) - } + resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: *allowPath.Src.copy().add(&srcAtomNegate), + Dst: allowPath.Dst, Conn: allowPath.Conn}) } } for _, dstAtom := range denyPath.Dst { if !dstAtom.isTautology() { dstAtomNegate := dstAtom.negate().(atomicTerm) - if allowPath.Dst.isTautology() { - resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: allowPath.Src, Dst: Conjunction{&dstAtomNegate}, - Conn: allowPath.Conn}) - } else { - resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: allowPath.Src, Dst: *allowPath.Dst.copy().add(&dstAtomNegate), - Conn: allowPath.Conn}) - } + resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: allowPath.Src, Dst: *allowPath.Dst.copy().add(&dstAtomNegate), + Conn: allowPath.Conn}) } if !denyPath.Conn.IsAll() { // Connection of deny path is not tautology resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: allowPath.Src, Dst: allowPath.Dst, Conn: allowPath.Conn.Subtract(denyPath.Conn)}) } } - return &resAllowPaths + return resAllowPaths.removeEmpty().removeTautology() } // ConvertFWRuleToSymbolicPaths given a rule, converts its src, dst and Conn to SymbolicPaths diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index db96f3f..2bc3be9 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -255,14 +255,6 @@ func TestAllowDenyOptimizeEmptyPath(t *testing.T) { allowWithDeny := ComputeAllowGivenDenies(&SymbolicPaths{&allowPath}, &SymbolicPaths{&denyPath}) fmt.Printf("allow path: %v with higher priority deny path:%v is:\n%v\n\n", allowPath.string(), denyPath.string(), allowWithDeny.String()) - negateAtomic1 := atomic1.negate().(atomicTerm) - require.Equal(t, true, atomic1.isNegateOf(negateAtomic1), "isNegateOf does not work") - for _, thisPath := range *allowWithDeny { - fmt.Printf("allowWithDeny.Src is 
%v isEmptySet? %v\n", thisPath.Src.string(), thisPath.Src.isEmptySet()) - fmt.Printf("path %v is Empty? %v\n", thisPath.string(), thisPath.isEmpty()) - } - require.Equal(t, true, (*allowWithDeny)[0].Src.isEmptySet(), "isEmptySet() does not work properly") - require.Equal(t, false, (*allowWithDeny)[1].Src.isEmptySet(), "isEmptySet() does not work properly") - newPath := allowWithDeny.removeEmpty() - fmt.Printf("newPath %v\n", newPath.String()) + require.Equal(t, "(s1 = str1) to (d1 != str1) All Connections", allowWithDeny.String(), + "optimized with deny not working properly") } From 52efd95f19bdd7bb647d8f7f828e87a97a17c7cf Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 13:16:00 +0200 Subject: [PATCH 40/76] removed non required comment --- pkg/symbolicexpr/symbolicPath.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index 3ead597..0d70f71 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -82,8 +82,6 @@ func ComputeAllowGivenDenies(allowPaths, denyPaths *SymbolicPaths) *SymbolicPath // algorithm described in README of symbolicexpr func computeAllowGivenAllowHigherDeny(allowPath, denyPath SymbolicPath) *SymbolicPaths { resAllowPaths := SymbolicPaths{} - // in case deny path is open from both ends - empty set of allow paths, as will be the result - // assumption: if more than one term, then none is tautology for _, srcAtom := range denyPath.Src { if !srcAtom.isTautology() { srcAtomNegate := srcAtom.negate().(atomicTerm) From 41e71fb6198b30635ff789f818bf625362243956 Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 14:29:32 +0200 Subject: [PATCH 41/76] added non-trivial connections to unit tests --- pkg/symbolicexpr/symbolicPath.go | 2 +- pkg/symbolicexpr/symbolicexpr_test.go | 21 +++++++++++---------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index 0d70f71..3b0a85d 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -8,7 +8,7 @@ import ( ) func (path *SymbolicPath) string() string { - return path.Src.string() + " to " + path.Dst.string() + " " + path.Conn.String() + return path.Conn.String() + " from " + path.Src.string() + " to " + path.Dst.string() } // if the source or destination is empty then so is the entire path diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index 2bc3be9..ff18fde 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -29,10 +29,9 @@ func TestSymbolicPaths(t *testing.T) { negateAtomic := atomic.negate().(atomicTerm) conjDst = *conjDst.add(&negateAtomic) } - conjSymbolicPath := SymbolicPath{Src: conjSrc, Dst: conjDst, Conn: netset.AllTransports()} + conjSymbolicPath := SymbolicPath{Src: conjSrc, Dst: conjDst, Conn: netset.AllTCPTransport()} fmt.Printf("\nconjSymbolicPath:\n%v\n", conjSymbolicPath.string()) - require.Equal(t, "(t1 = str1 and t2 = str2 and t3 = str3) to (t1 != str1 and t2 != str2 and t3 != str3)"+ - " All Connections", + require.Equal(t, "TCP from (t1 = str1 and t2 = str2 and t3 = str3) to (t1 != str1 and t2 != str2 and t3 != str3)", conjSymbolicPath.string(), "conjSymbolicPath not as expected") println("conjEmpty", conjEmpty.string()) require.Equal(t, emptySet, conjEmpty.string(), "empty conjunction not as expected") @@ -40,12 +39,13 @@ func TestSymbolicPaths(t *testing.T) { // Input: // allow symbolic path: -// 
src: (s1 = str1) dst: (d1 = str1) +// src: (s1 = str1) dst: (d1 = str1) All Connection // deny symbolic path: -// src: (s2 = str2) dst: (d2 = str2) +// src: (s2 = str2) dst: (d2 = str2) UDP // Output allow paths: -// src: (s1 = str1 and s2 != str2) dst (d1 = str1) -// src: (s1 = str1) dst: (d1 = str1 and d2 != str2) +// src: (s1 = str1 and s2 != str2) dst (d1 = str1) All connection +// src: (s1 = str1) dst: (d1 = str1 and d2 != str2) All connection +// src: (s1 = str1) dst: (d1 = str1) ICMP, TCP // allow symbolic paths: func TestComputeAllowGivenDenySingleTermEach(t *testing.T) { conjSrc1, conjDst1, conjSrc2, conjDst2 := Conjunction{}, Conjunction{}, Conjunction{}, Conjunction{} @@ -62,12 +62,13 @@ func TestComputeAllowGivenDenySingleTermEach(t *testing.T) { atomicDst2 := &atomicTerm{property: testDst2, toVal: "str2"} conjDst2 = *conjDst2.add(atomicDst2) allowPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.AllTransports()} - denyPath := SymbolicPath{Src: conjSrc2, Dst: conjDst2, Conn: netset.AllTransports()} + denyPath := SymbolicPath{Src: conjSrc2, Dst: conjDst2, Conn: netset.AllUDPTransport()} fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) - require.Equal(t, "(s1 = str1 and s2 != str2) to (d1 = str1) All Connections\n"+ - "(s1 = str1) to (d1 = str1 and d2 != str2) All Connections", + require.Equal(t, "All Connections from (s1 = str1 and s2 != str2) to (d1 = str1)\n"+ + "All Connections from (s1 = str1) to (d1 = str1 and d2 != str2)\n"+ + "ICMP,TCP from (s1 = str1) to (d1 = str1)", allowGivenDeny.String(), "allowGivenDeny single term computation not as expected") } From b89f3dd52baa0c190423fe5097ac485edb51eda0 Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 15:42:13 +0200 Subject: [PATCH 42/76] added another test --- pkg/symbolicexpr/symbolicexpr_test.go | 29 +++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index ff18fde..50cc907 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -47,7 +47,7 @@ func TestSymbolicPaths(t *testing.T) { // src: (s1 = str1) dst: (d1 = str1 and d2 != str2) All connection // src: (s1 = str1) dst: (d1 = str1) ICMP, TCP // allow symbolic paths: -func TestComputeAllowGivenDenySingleTermEach(t *testing.T) { +func TestComputeAllowGivenDenySingleTermEach1(t *testing.T) { conjSrc1, conjDst1, conjSrc2, conjDst2 := Conjunction{}, Conjunction{}, Conjunction{}, Conjunction{} testSrc1 := initTestTag("s1") atomic1 := &atomicTerm{property: testSrc1, toVal: "str1"} @@ -72,9 +72,34 @@ func TestComputeAllowGivenDenySingleTermEach(t *testing.T) { allowGivenDeny.String(), "allowGivenDeny single term computation not as expected") } +func TestComputeAllowGivenDenySingleTermEach2(t *testing.T) { + conjSrc1, conjDst1, conjSrc2, conjDst2 := Conjunction{}, Conjunction{}, Conjunction{}, Conjunction{} + testSrc1 := initTestTag("s1") + atomic1 := &atomicTerm{property: testSrc1, toVal: "str1"} + conjSrc1 = *conjSrc1.add(atomic1) + testDst1 := initTestTag("d1") + atomicDst1 := &atomicTerm{property: testDst1, toVal: "str1"} + conjDst1 = *conjDst1.add(atomicDst1) + testSrc2 := initTestTag("s2") + atomic2 := &atomicTerm{property: testSrc2, toVal: "str2"} + conjSrc2 = *conjSrc2.add(atomic2) + 
testDst2 := initTestTag("d2") + atomicDst2 := &atomicTerm{property: testDst2, toVal: "str2"} + conjDst2 = *conjDst2.add(atomicDst2) + allowPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.AllUDPTransport()} + denyPath := SymbolicPath{Src: conjSrc2, Dst: conjDst2, Conn: netset.AllTCPTransport()} + fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.string(), denyPath.string()) + allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) + // todo: output will be just the original allow path after basic optimization + fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) + require.Equal(t, "UDP from (s1 = str1 and s2 != str2) to (d1 = str1)\n"+ + "UDP from (s1 = str1) to (d1 = str1 and d2 != str2)\nUDP from (s1 = str1) to (d1 = str1)", + allowGivenDeny.String(), "allowGivenDeny single term computation not as expected") +} + // Input: // allow symbolic path: -// (s1 = str1 and s2 = str2 and s3 = str3) dst: (s1 = str1 and s2 = str2 and s3 = str3) +// (s1 = str1 and s2 = str2 and s3 = str3) dst: (s1 = str1 and s2 = str2 and s3 = str3) co // deny symbolic path: // src: (s1` = str1` and s2` = str2` and s3` = str3`) dst: (s1` = str1` and s2` = str2` and s3` = str3`) // Output allow paths: From 3bc188cbbcd7dc0aa01a1e4b0c76a02b9a138da5 Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 16:04:57 +0200 Subject: [PATCH 43/76] update tests with new format --- pkg/symbolicexpr/symbolicexpr_test.go | 69 +++++++++++++-------------- 1 file changed, 34 insertions(+), 35 deletions(-) diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index 50cc907..184a641 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -99,9 +99,9 @@ func TestComputeAllowGivenDenySingleTermEach2(t *testing.T) { // Input: // allow symbolic path: -// (s1 = str1 and s2 = str2 and s3 = str3) dst: (s1 = str1 and s2 = str2 and s3 = str3) co +// (s1 = str1 and s2 = str2 and s3 = str3) dst: (s1 = str1 and s2 = str2 and s3 = str3) conn TCP // deny symbolic path: -// src: (s1` = str1` and s2` = str2` and s3` = str3`) dst: (s1` = str1` and s2` = str2` and s3` = str3`) +// src: (s1` = str1` and s2` = str2` and s3` = str3`) dst: (s1` = str1` and s2` = str2` and s3` = str3`) conn ALL // Output allow paths: // src: (s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`) dst: (s1 = str1 and s2 = str2 and s3 = str3) // src: (s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`) dst: (s1 = str1 and s2 = str2 and s3 = str3) @@ -119,27 +119,25 @@ func TestComputeAllowGivenDenyThreeTermsEach(t *testing.T) { atomicDeny := &atomicTerm{property: testDeny, toVal: fmt.Sprintf("str%v`", i)} conjDeny = *conjDeny.add(atomicDeny) } - allowPath := SymbolicPath{Src: conjAllow, Dst: conjAllow, Conn: netset.AllTransports()} + allowPath := SymbolicPath{Src: conjAllow, Dst: conjAllow, Conn: netset.AllTCPTransport()} denyPath := SymbolicPath{Src: conjDeny, Dst: conjDeny, Conn: netset.AllTransports()} fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) require.Equal(t, - "(s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`) to (s1 = str1 and s2 = str2 and s3 = str3) "+ - "All Connections\n"+ - "(s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`) to (s1 = str1 and s2 = str2 
and s3 = str3)"+ - " All Connections\n"+ - "(s1 = str1 and s2 = str2 and s3 = str3 and s3` != str3`) to (s1 = str1 and s2 = str2 and s3 = str3)"+ - " All Connections\n"+ - "(s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`)"+ - " All Connections\n"+ - "(s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`)"+ - " All Connections\n"+ - "(s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s3` != str3`)"+ - " All Connections", + "TCP from (s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`) to (s1 = str1 and s2 = str2 and s3 = str3)\n"+ + "TCP from (s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`) to (s1 = str1 and s2 = str2 and s3 = str3)\n"+ + "TCP from (s1 = str1 and s2 = str2 and s3 = str3 and s3` != str3`) to (s1 = str1 and s2 = str2 and s3 = str3)\n"+ + "TCP from (s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`)\n"+ + "TCP from (s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`)\n"+ + "TCP from (s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s3` != str3`)", allowGivenDeny.String(), "allowGivenDeny three terms computation not as expected") } +// todo: got here in enriching tests with non trivial connections. Make connection non-trivial when optimization +// +// of removing redundant path is added +// // Input: // allow symbolic path: // src: src: (*) dst: (*) @@ -166,9 +164,9 @@ func TestComputeAllowGivenDenyAllowTautology(t *testing.T) { allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) require.Equal(t, - "(s1` != str1`) to (*) All Connections\n(s2` != str2`) to (*) All Connections\n"+ - "(s3` != str3`) to (*) All Connections\n(*) to (s1` != str1`) All Connections\n"+ - "(*) to (s2` != str2`) All Connections\n(*) to (s3` != str3`) All Connections", allowGivenDeny.String(), + "All Connections from (s1` != str1`) to (*)\nAll Connections from (s2` != str2`) to (*)\n"+ + "All Connections from (s3` != str3`) to (*)\nAll Connections from (*) to (s1` != str1`)\n"+ + "All Connections from (*) to (s2` != str2`)\nAll Connections from (*) to (s3` != str3`)", allowGivenDeny.String(), "allowGivenDeny allow tautology computation not as expected") } @@ -242,22 +240,23 @@ func TestComputeAllowGivenDenies(t *testing.T) { fmt.Printf("allowPaths:\n%v\ndenyPaths:\n%v\n", allowPaths.String(), denyPaths.String()) res := ComputeAllowGivenDenies(&allowPaths, &denyPaths) fmt.Printf("ComputeAllowGivenDenies:\n%v\n", res.String()) - require.Equal(t, "(tag = t0 and segment != s0 and segment != s2 and segment != s4) to (tag = t1) All Connections\n"+ - "(tag = t0 and segment != s0 and segment != s2) to (tag = t1 and segment != s5) All Connections\n"+ - "(tag = t0 and segment != s0 and segment != s4) to (tag = t1 and segment != s3) All Connections\n"+ - "(tag = t0 and segment != s0) to (tag = t1 and segment != s3 and segment != s5) All Connections\n"+ - "(tag = t0 and segment != s2 and segment != s4) to (tag = t1 and segment != s1) All Connections\n"+ - "(tag = t0 and segment != s2) to (tag = t1 and segment != s1 and segment != s5) All Connections\n"+ - "(tag = t0 and segment != s4) to (tag = t1 and segment != s1 and segment != s3) All Connections\n"+ - "(tag = t0) to (tag = t1 and segment != s1 and segment != s3 and segment 
!= s5) All Connections\n"+ - "(tag = t2 and segment != s0 and segment != s2 and segment != s4) to (tag = t3) All Connections\n"+ - "(tag = t2 and segment != s0 and segment != s2) to (tag = t3 and segment != s5) All Connections\n"+ - "(tag = t2 and segment != s0 and segment != s4) to (tag = t3 and segment != s3) All Connections\n"+ - "(tag = t2 and segment != s0) to (tag = t3 and segment != s3 and segment != s5) All Connections\n"+ - "(tag = t2 and segment != s2 and segment != s4) to (tag = t3 and segment != s1) All Connections\n"+ - "(tag = t2 and segment != s2) to (tag = t3 and segment != s1 and segment != s5) All Connections\n"+ - "(tag = t2 and segment != s4) to (tag = t3 and segment != s1 and segment != s3) All Connections\n"+ - "(tag = t2) to (tag = t3 and segment != s1 and segment != s3 and segment != s5) All Connections", + require.Equal(t, + "All Connections from (tag = t0 and segment != s0 and segment != s2 and segment != s4) to (tag = t1)\n"+ + "All Connections from (tag = t0 and segment != s0 and segment != s2) to (tag = t1 and segment != s5)\n"+ + "All Connections from (tag = t0 and segment != s0 and segment != s4) to (tag = t1 and segment != s3)\n"+ + "All Connections from (tag = t0 and segment != s0) to (tag = t1 and segment != s3 and segment != s5)\n"+ + "All Connections from (tag = t0 and segment != s2 and segment != s4) to (tag = t1 and segment != s1)\n"+ + "All Connections from (tag = t0 and segment != s2) to (tag = t1 and segment != s1 and segment != s5)\n"+ + "All Connections from (tag = t0 and segment != s4) to (tag = t1 and segment != s1 and segment != s3)\n"+ + "All Connections from (tag = t0) to (tag = t1 and segment != s1 and segment != s3 and segment != s5)\n"+ + "All Connections from (tag = t2 and segment != s0 and segment != s2 and segment != s4) to (tag = t3)\n"+ + "All Connections from (tag = t2 and segment != s0 and segment != s2) to (tag = t3 and segment != s5)\n"+ + "All Connections from (tag = t2 and segment != s0 and segment != s4) to (tag = t3 and segment != s3)\n"+ + "All Connections from (tag = t2 and segment != s0) to (tag = t3 and segment != s3 and segment != s5)\n"+ + "All Connections from (tag = t2 and segment != s2 and segment != s4) to (tag = t3 and segment != s1)\n"+ + "All Connections from (tag = t2 and segment != s2) to (tag = t3 and segment != s1 and segment != s5)\n"+ + "All Connections from (tag = t2 and segment != s4) to (tag = t3 and segment != s1 and segment != s3)\n"+ + "All Connections from (tag = t2) to (tag = t3 and segment != s1 and segment != s3 and segment != s5)", ComputeAllowGivenDenies(&allowPaths, &denyPaths).String(), "ComputeAllowGivenDenies computation not as expected") } @@ -281,6 +280,6 @@ func TestAllowDenyOptimizeEmptyPath(t *testing.T) { allowWithDeny := ComputeAllowGivenDenies(&SymbolicPaths{&allowPath}, &SymbolicPaths{&denyPath}) fmt.Printf("allow path: %v with higher priority deny path:%v is:\n%v\n\n", allowPath.string(), denyPath.string(), allowWithDeny.String()) - require.Equal(t, "(s1 = str1) to (d1 != str1) All Connections", allowWithDeny.String(), + require.Equal(t, "All Connections from (s1 = str1) to (d1 != str1)", allowWithDeny.String(), "optimized with deny not working properly") } From c3a033451e8c5771f63b6cac950caec74421dc17 Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 16:09:11 +0200 Subject: [PATCH 44/76] update tests with new format --- .../tests_expected_output/ExampleDumbeldore.txt | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git 
a/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt b/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt index 8365003..b944227 100644 --- a/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt +++ b/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt @@ -1,13 +1,13 @@ symbolicInbound Rules: ~~~~~~~~~~~~~~~~~~~~~~~ - category: 4 action: allow paths: (group = DumbledoreAll) to (*) All Connections - category: 4 action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) All Connections - category: 4 action: allow paths: (group = DumbledoreNoSly) to (*) All Connections - category: 4 action: deny paths: (*) to (*) All Connections + category: 4 action: allow paths: All Connections from (group = DumbledoreAll) to (*) + category: 4 action: deny paths: All Connections from (group = DumbledoreNoSly) to (group = Slytherin) + category: 4 action: allow paths: All Connections from (group = DumbledoreNoSly) to (*) + category: 4 action: deny paths: All Connections from (*) to (*) symbolicOutbound Rules: ~~~~~~~~~~~~~~~~~~~~~~~~~ - category: 4 action: allow paths: (group = DumbledoreAll) to (*) All Connections - category: 4 action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) All Connections - category: 4 action: allow paths: (group = DumbledoreNoSly) to (*) All Connections - category: 4 action: deny paths: (*) to (*) All Connections \ No newline at end of file + category: 4 action: allow paths: All Connections from (group = DumbledoreAll) to (*) + category: 4 action: deny paths: All Connections from (group = DumbledoreNoSly) to (group = Slytherin) + category: 4 action: allow paths: All Connections from (group = DumbledoreNoSly) to (*) + category: 4 action: deny paths: All Connections from (*) to (*) \ No newline at end of file From ab48be20c2e521a57643de91889fb3c850ef6b2c Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 16:29:16 +0200 Subject: [PATCH 45/76] lint --- pkg/symbolicexpr/conjunction.go | 1 + pkg/symbolicexpr/symbolicexpr_test.go | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/pkg/symbolicexpr/conjunction.go b/pkg/symbolicexpr/conjunction.go index 10582fe..9839e38 100644 --- a/pkg/symbolicexpr/conjunction.go +++ b/pkg/symbolicexpr/conjunction.go @@ -28,6 +28,7 @@ func (c *Conjunction) copy() *Conjunction { return &newC } +// nolint:unused // will be used func (c *Conjunction) isTautology() bool { if len(*c) == 1 && (*c)[0].isTautology() { return true diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index 184a641..420faa3 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -2,9 +2,9 @@ package symbolicexpr import ( "fmt" - "github.com/np-guard/models/pkg/netset" "testing" + "github.com/np-guard/models/pkg/netset" "github.com/stretchr/testify/require" ) From 9a311416dd978112cb29a0d3b0debff3bb202745 Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 16:35:36 +0200 Subject: [PATCH 46/76] lint --- pkg/symbolicexpr/conjunction.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/symbolicexpr/conjunction.go b/pkg/symbolicexpr/conjunction.go index 9839e38..31c2ac3 100644 --- a/pkg/symbolicexpr/conjunction.go +++ b/pkg/symbolicexpr/conjunction.go @@ -28,7 +28,7 @@ func (c *Conjunction) copy() *Conjunction { return &newC } -// nolint:unused // will be used +// nolint:unused //will be used func (c *Conjunction) isTautology() bool { if len(*c) == 1 && (*c)[0].isTautology() { return true From 
ea0429b6c386328d67a2ed3424a33a48ae6062a2 Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 16:38:04 +0200 Subject: [PATCH 47/76] lint --- pkg/symbolicexpr/conjunction.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/symbolicexpr/conjunction.go b/pkg/symbolicexpr/conjunction.go index 31c2ac3..4a6fd9c 100644 --- a/pkg/symbolicexpr/conjunction.go +++ b/pkg/symbolicexpr/conjunction.go @@ -28,7 +28,7 @@ func (c *Conjunction) copy() *Conjunction { return &newC } -// nolint:unused //will be used +//nolint:unused //will be used func (c *Conjunction) isTautology() bool { if len(*c) == 1 && (*c)[0].isTautology() { return true From 8c6b3b6bad5867449807441b66a4c4fc961df64b Mon Sep 17 00:00:00 2001 From: shirim Date: Sun, 22 Dec 2024 16:41:29 +0200 Subject: [PATCH 48/76] lint --- pkg/symbolicexpr/symbolicexpr_test.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index 420faa3..daa4663 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -4,8 +4,9 @@ import ( "fmt" "testing" - "github.com/np-guard/models/pkg/netset" "github.com/stretchr/testify/require" + + "github.com/np-guard/models/pkg/netset" ) type testTag struct { From f0217e24b076206972546b8e68c8c9b3549ddd7c Mon Sep 17 00:00:00 2001 From: ShiriMoran <139739065+ShiriMoran@users.noreply.github.com> Date: Sun, 22 Dec 2024 16:43:34 +0200 Subject: [PATCH 49/76] Synthesis test (#95) --- pkg/synthesis/model.go | 6 ++-- pkg/synthesis/synthesis_test.go | 30 +++++++++++++++++-- .../ExampleDumbeldore.txt | 13 ++++++++ 3 files changed, 43 insertions(+), 6 deletions(-) create mode 100644 pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt diff --git a/pkg/synthesis/model.go b/pkg/synthesis/model.go index 27db3d1..f0f16b0 100644 --- a/pkg/synthesis/model.go +++ b/pkg/synthesis/model.go @@ -15,9 +15,9 @@ import ( //nolint:all // todo: tmp for defs without implementation type AbstractModelSyn struct { vms VMs - // groupsToVms includes atomic NSX groups; e.g., groups defined over other entities (such as tags) are not included - groupsToVms map[*collector.Group]VMs // todo compute - tagsToVms map[*collector.Tag]VMs // todo compute + // epToGroups includes atomic NSX groups; e.g., groups defined over other entities (such as tags) are not included + epToGroups map[*endpoints.VM]collector.Group // todo compute + epToTags map[*endpoints.VM]Tags // todo compute // todo: add similar maps to OS, hostname policy []*symbolicPolicy // with default deny } diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index aa9da25..0df53cc 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -2,6 +2,9 @@ package synthesis import ( "fmt" + "os" + "path/filepath" + "strings" "testing" "github.com/stretchr/testify/require" @@ -11,6 +14,12 @@ import ( "github.com/np-guard/vmware-analyzer/pkg/model" ) +// todo... 
+const ( + expectedOutput = "tests_expected_output/" + carriageReturn = "\r" +) + type synthesisTest struct { name string exData data.Example @@ -26,12 +35,16 @@ var allTests = []synthesisTest{ func (a *synthesisTest) runPreprocessing(t *testing.T) { rc := data.ExamplesGeneration(a.exData) parser := model.NewNSXConfigParserFromResourcesContainer(rc) - err := parser.RunParser() - require.Nil(t, err) + err1 := parser.RunParser() + require.Nil(t, err1) config := parser.GetConfig() policy := preProcessing(config.Fw.CategoriesSpecs) fmt.Println(policy.string()) - // todo: test via comparing output to files in a separate PR (issue with window in analyzer tests) + expectedOutputFileName := filepath.Join(getTestsDirOut(), a.name+".txt") + expectedOutput, err2 := os.ReadFile(expectedOutputFileName) + require.Nil(t, err2) + expectedOutputStr := string(expectedOutput) + require.Equal(t, cleanStr(policy.string()), cleanStr(expectedOutputStr), "output not as expected") } func TestPreprocessing(t *testing.T) { @@ -41,3 +54,14 @@ func TestPreprocessing(t *testing.T) { test.runPreprocessing(t) } } + +// getTestsDirOut returns the path to the dir where test output files are located +func getTestsDirOut() string { + currentDir, _ := os.Getwd() + return filepath.Join(currentDir, expectedOutput) +} + +// comparison should be insensitive to line comparators; cleaning strings from line comparators +func cleanStr(str string) string { + return strings.ReplaceAll(strings.ReplaceAll(str, "/n", ""), carriageReturn, "") +} diff --git a/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt b/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt new file mode 100644 index 0000000..865373e --- /dev/null +++ b/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt @@ -0,0 +1,13 @@ + +symbolicInbound Rules: +~~~~~~~~~~~~~~~~~~~~~~~ + category: 4 action: allow paths: (group = DumbledoreAll) to (*) + category: 4 action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) + category: 4 action: allow paths: (group = DumbledoreNoSly) to (*) + category: 4 action: deny paths: (*) to (*) +symbolicOutbound Rules: +~~~~~~~~~~~~~~~~~~~~~~~~~ + category: 4 action: allow paths: (group = DumbledoreAll) to (*) + category: 4 action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) + category: 4 action: allow paths: (group = DumbledoreNoSly) to (*) + category: 4 action: deny paths: (*) to (*) \ No newline at end of file From c13cd141946894a7bd55de08518310e604719980 Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 23 Dec 2024 11:28:58 +0200 Subject: [PATCH 50/76] don't add a dup term --- pkg/symbolicexpr/conjunction.go | 31 +++++++++++++++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/pkg/symbolicexpr/conjunction.go b/pkg/symbolicexpr/conjunction.go index 4a6fd9c..cd46e33 100644 --- a/pkg/symbolicexpr/conjunction.go +++ b/pkg/symbolicexpr/conjunction.go @@ -18,6 +18,9 @@ func (c *Conjunction) string() string { } func (c *Conjunction) add(atomic *atomicTerm) *Conjunction { + if c.contains(atomic) { + return c + } res := append(*c, atomic) return &res } @@ -28,7 +31,6 @@ func (c *Conjunction) copy() *Conjunction { return &newC } -//nolint:unused //will be used func (c *Conjunction) isTautology() bool { if len(*c) == 1 && (*c)[0].isTautology() { return true @@ -49,7 +51,7 @@ func (c *Conjunction) removeTautology() Conjunction { return newC } -// checks whether the Conjunction is empty: either syntactically, or contains an atomicTerm and its negation +// checks whether the 
conjunction is empty: either syntactically, or contains an atomicTerm and its negation func (c *Conjunction) isEmptySet() bool { if len(*c) == 0 { return true @@ -65,3 +67,28 @@ func (c *Conjunction) isEmptySet() bool { } return false } + +// checks whether conjunction other is implies by conjunction c +func (c *Conjunction) implies(other *Conjunction) bool { + if other.isTautology() { + return true + } + if len(*c) == 0 || len(*other) == 0 { + return false + } + for _, atomicTerm := range *other { + if !c.contains(atomicTerm) { + return false + } + } + return true +} + +func (c *Conjunction) contains(atom atomic) bool { + for _, atomicTerm := range *c { + if atomicTerm.string() == (atom).string() { + return true + } + } + return false +} From 1f07ec6879f7bf68855725cc4ccad2905dd6be1e Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 23 Dec 2024 12:48:27 +0200 Subject: [PATCH 51/76] optimization for disjoint paths --- pkg/symbolicexpr/conjunction.go | 18 ++++++++++++++++++ pkg/symbolicexpr/symbolicPath.go | 20 ++++++++++++++++++-- 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/pkg/symbolicexpr/conjunction.go b/pkg/symbolicexpr/conjunction.go index cd46e33..95c4e53 100644 --- a/pkg/symbolicexpr/conjunction.go +++ b/pkg/symbolicexpr/conjunction.go @@ -84,6 +84,24 @@ func (c *Conjunction) implies(other *Conjunction) bool { return true } +// checks whether conjunction other is disjoint to conjunction c +// this is the case if there's a term in c and its contradiction in other +// we will later add hints +func (c *Conjunction) disjoint(other *Conjunction) bool { + if len(*c) == 0 || len(*other) == 0 { + return false + } + if other.isTautology() || c.isTautology() { + return false + } + for _, atomicTerm := range *other { + if c.contains(atomicTerm.negate()) { + return true + } + } + return false +} + func (c *Conjunction) contains(atom atomic) bool { for _, atomicTerm := range *c { if atomicTerm.string() == (atom).string() { diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index 3b0a85d..2500f99 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -13,7 +13,13 @@ func (path *SymbolicPath) string() string { // if the source or destination is empty then so is the entire path func (path *SymbolicPath) isEmpty() bool { - return path.Src.isEmptySet() || path.Dst.isEmptySet() || path.Conn.IsEmpty() + return path.Conn.IsEmpty() || path.Src.isEmptySet() || path.Dst.isEmptySet() +} + +// checks whether paths are disjoint. This is the case when one of the path's components (src, dst, conn) are disjoint +func (path *SymbolicPath) disJointPaths(other *SymbolicPath) bool { + return (*path).Conn.Intersect((*other).Conn).IsEmpty() || (*path).Src.disjoint(&(*other).Src) || + (*path).Dst.disjoint(&(*other).Dst) } func (paths *SymbolicPaths) String() string { @@ -62,9 +68,19 @@ func ComputeAllowGivenDenies(allowPaths, denyPaths *SymbolicPaths) *SymbolicPath } res := SymbolicPaths{} for _, allowPath := range *allowPaths { + relevantDenyPaths := SymbolicPaths{} + for _, denyPath := range *denyPaths { + if !allowPath.disJointPaths(denyPath) { + relevantDenyPaths = append(relevantDenyPaths, denyPath) + } + } + if len(relevantDenyPaths) == 0 { // the denys paths are not relevant for this allow. 
This allow path remains as is + res = append(res, allowPath) + continue + } var computedAllowPaths, newComputedAllowPaths SymbolicPaths newComputedAllowPaths = SymbolicPaths{allowPath} - for _, denyPath := range *denyPaths { + for _, denyPath := range relevantDenyPaths { computedAllowPaths = newComputedAllowPaths newComputedAllowPaths = SymbolicPaths{} for _, computedAllow := range computedAllowPaths { From 7eafb2e7abbe030c92a2b9b424f37690c17cc8d8 Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 23 Dec 2024 13:56:55 +0200 Subject: [PATCH 52/76] add corner case handling --- pkg/symbolicexpr/conjunction.go | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pkg/symbolicexpr/conjunction.go b/pkg/symbolicexpr/conjunction.go index 95c4e53..78189e0 100644 --- a/pkg/symbolicexpr/conjunction.go +++ b/pkg/symbolicexpr/conjunction.go @@ -43,11 +43,17 @@ func (c *Conjunction) removeTautology() Conjunction { return *c } newC := Conjunction{} + tautologyRemoved := false for _, atom := range *c { if !atom.isTautology() { newC = append(newC, atom) + } else { + tautologyRemoved = true } } + if len(newC) == 0 && tautologyRemoved { + return Conjunction{tautology{}} + } return newC } From 9ee3b89a3a6ff9978cd80fc28d61ab45e5d202ff Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 23 Dec 2024 14:03:43 +0200 Subject: [PATCH 53/76] update test to include latest optimization --- pkg/symbolicexpr/symbolicexpr_test.go | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index daa4663..45f4f02 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -91,11 +91,17 @@ func TestComputeAllowGivenDenySingleTermEach2(t *testing.T) { denyPath := SymbolicPath{Src: conjSrc2, Dst: conjDst2, Conn: netset.AllTCPTransport()} fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) - // todo: output will be just the original allow path after basic optimization fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) + // computeAllowGivenAllowHigherDeny not optimized require.Equal(t, "UDP from (s1 = str1 and s2 != str2) to (d1 = str1)\n"+ "UDP from (s1 = str1) to (d1 = str1 and d2 != str2)\nUDP from (s1 = str1) to (d1 = str1)", allowGivenDeny.String(), "allowGivenDeny single term computation not as expected") + // ComputeAllowGivenDenies optimize + allowGivenDenyPaths := *ComputeAllowGivenDenies(&SymbolicPaths{&allowPath}, &SymbolicPaths{&denyPath}) + fmt.Printf("allowGivenDenyPaths is %v\n", allowGivenDenyPaths.String()) + require.Equal(t, "UDP from (s1 = str1) to (d1 = str1)", allowGivenDenyPaths.String(), + "ComputeAllowGivenDenies does not work as expected") + } // Input: From 8839fc99fc1df1cc057707108c92c1d1309e121b Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 23 Dec 2024 14:23:45 +0200 Subject: [PATCH 54/76] more tests --- pkg/symbolicexpr/symbolicexpr_test.go | 35 ++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index 45f4f02..0af07fc 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -2,6 +2,7 @@ package symbolicexpr import ( "fmt" + "github.com/np-guard/models/pkg/netp" "testing" "github.com/stretchr/testify/require" @@ -47,7 +48,6 @@ func TestSymbolicPaths(t *testing.T) 
{ // src: (s1 = str1 and s2 != str2) dst (d1 = str1) All connection // src: (s1 = str1) dst: (d1 = str1 and d2 != str2) All connection // src: (s1 = str1) dst: (d1 = str1) ICMP, TCP -// allow symbolic paths: func TestComputeAllowGivenDenySingleTermEach1(t *testing.T) { conjSrc1, conjDst1, conjSrc2, conjDst2 := Conjunction{}, Conjunction{}, Conjunction{}, Conjunction{} testSrc1 := initTestTag("s1") @@ -73,6 +73,13 @@ func TestComputeAllowGivenDenySingleTermEach1(t *testing.T) { allowGivenDeny.String(), "allowGivenDeny single term computation not as expected") } +// Input: +// allow symbolic path: +// src: (s1 = str1) dst: (d1 = str1) UDP +// deny symbolic path: +// src: (s2 = str2) dst: (d2 = str2) TCP +// Output allow paths: +// src: (s1 = str1) dst: (d1 = str1) UDP func TestComputeAllowGivenDenySingleTermEach2(t *testing.T) { conjSrc1, conjDst1, conjSrc2, conjDst2 := Conjunction{}, Conjunction{}, Conjunction{}, Conjunction{} testSrc1 := initTestTag("s1") @@ -104,6 +111,32 @@ func TestComputeAllowGivenDenySingleTermEach2(t *testing.T) { } +// Input: +// allow symbolic path: +// src: (s1 = str1) dst: (d1 = str1) TCP +// deny symbolic path: +// src: (s1 = str1) dst: (d1 = str2) TCP src port 0-50 +// Output allow paths: +// src: (s1 = str1) dst: (d1 = str1) TCP src port TCP src-ports: 51-65535 +func TestComputeAllowGivenDenySingleTermEach3(t *testing.T) { + conjSrc1, conjDst1 := Conjunction{}, Conjunction{} + testSrc1 := initTestTag("s1") + atomic1 := &atomicTerm{property: testSrc1, toVal: "str1"} + conjSrc1 = *conjSrc1.add(atomic1) + testDst1 := initTestTag("d1") + atomicDst1 := &atomicTerm{property: testDst1, toVal: "str1"} + conjDst1 = *conjDst1.add(atomicDst1) + allowPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.AllTCPTransport()} + denyPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.NewTCPTransport(0, 50, + netp.MinPort, netp.MaxPort)} + fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.string(), denyPath.string()) + allowGivenDenyPaths := *ComputeAllowGivenDenies(&SymbolicPaths{&allowPath}, &SymbolicPaths{&denyPath}) + fmt.Printf("allowGivenDenyPaths is %v\n", allowGivenDenyPaths.String()) + require.Equal(t, "TCP src-ports: 51-65535 from (s1 = str1) to (d1 = str1)", allowGivenDenyPaths.String(), + "ComputeAllowGivenDenies does not work as expected") + +} + // Input: // allow symbolic path: // (s1 = str1 and s2 = str2 and s3 = str3) dst: (s1 = str1 and s2 = str2 and s3 = str3) conn TCP From 666f22eac9a88bb3af0734d70825a0ca8eccff34 Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 23 Dec 2024 14:27:42 +0200 Subject: [PATCH 55/76] another test --- pkg/symbolicexpr/symbolicexpr_test.go | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index 0af07fc..bdfd311 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -137,6 +137,29 @@ func TestComputeAllowGivenDenySingleTermEach3(t *testing.T) { } +// Input: +// allow symbolic path: +// src: (s1 = str1) dst: (d1 = str1) TCP +// deny symbolic path: +// src: (s1 = str1) dst: (d1 = str2) TCP +// Output allow paths: empty set +func TestComputeAllowGivenDenySingleTermEach4(t *testing.T) { + conjSrc1, conjDst1 := Conjunction{}, Conjunction{} + testSrc1 := initTestTag("s1") + atomic1 := &atomicTerm{property: testSrc1, toVal: "str1"} + conjSrc1 = *conjSrc1.add(atomic1) + testDst1 := initTestTag("d1") + atomicDst1 := &atomicTerm{property: testDst1, toVal: "str1"} + 
conjDst1 = *conjDst1.add(atomicDst1) + path := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.AllTCPTransport()} + fmt.Printf("allowPath is %v\ndenyPath is %v\n", path.string(), path.string()) + allowGivenDenyPaths := *ComputeAllowGivenDenies(&SymbolicPaths{&path}, &SymbolicPaths{&path}) + fmt.Printf("allowGivenDenyPaths is %v\n", allowGivenDenyPaths.String()) + require.Equal(t, "empty set ", allowGivenDenyPaths.String(), + "ComputeAllowGivenDenies does not work as expected") + +} + // Input: // allow symbolic path: // (s1 = str1 and s2 = str2 and s3 = str3) dst: (s1 = str1 and s2 = str2 and s3 = str3) conn TCP From bc2a6d10b0f5f9585335b3b61da595bf3d5c10c2 Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 23 Dec 2024 15:27:14 +0200 Subject: [PATCH 56/76] enriched test --- pkg/symbolicexpr/symbolicexpr_test.go | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index bdfd311..7951c73 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -165,6 +165,7 @@ func TestComputeAllowGivenDenySingleTermEach4(t *testing.T) { // (s1 = str1 and s2 = str2 and s3 = str3) dst: (s1 = str1 and s2 = str2 and s3 = str3) conn TCP // deny symbolic path: // src: (s1` = str1` and s2` = str2` and s3` = str3`) dst: (s1` = str1` and s2` = str2` and s3` = str3`) conn ALL +// src: (s1 = str1 and s2 = str2 and s3 = str3) dst: (s1 = str1 and s2 = str2 and s3 = str3) conn UDP (no effect) // Output allow paths: // src: (s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`) dst: (s1 = str1 and s2 = str2 and s3 = str3) // src: (s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`) dst: (s1 = str1 and s2 = str2 and s3 = str3) @@ -184,9 +185,11 @@ func TestComputeAllowGivenDenyThreeTermsEach(t *testing.T) { } allowPath := SymbolicPath{Src: conjAllow, Dst: conjAllow, Conn: netset.AllTCPTransport()} denyPath := SymbolicPath{Src: conjDeny, Dst: conjDeny, Conn: netset.AllTransports()} + denyPathNoEffect := SymbolicPath{Src: conjDeny, Dst: conjDeny, Conn: netset.AllUDPTransport()} + allowGivenDenyPaths := *ComputeAllowGivenDenies(&SymbolicPaths{&allowPath}, + &SymbolicPaths{&denyPath, &denyPathNoEffect}) fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.string(), denyPath.string()) - allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) - fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) + fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDenyPaths.String()) require.Equal(t, "TCP from (s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`) to (s1 = str1 and s2 = str2 and s3 = str3)\n"+ "TCP from (s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`) to (s1 = str1 and s2 = str2 and s3 = str3)\n"+ @@ -194,13 +197,9 @@ func TestComputeAllowGivenDenyThreeTermsEach(t *testing.T) { "TCP from (s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`)\n"+ "TCP from (s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s2` != str2`)\n"+ "TCP from (s1 = str1 and s2 = str2 and s3 = str3) to (s1 = str1 and s2 = str2 and s3 = str3 and s3` != str3`)", - allowGivenDeny.String(), "allowGivenDeny three terms computation not as expected") + allowGivenDenyPaths.String(), "allowGivenDeny three terms computation not as expected") } -// todo: got here in enriching tests with non trivial 
connections. Make connection non-trivial when optimization -// -// of removing redundant path is added -// // Input: // allow symbolic path: // src: src: (*) dst: (*) From 58761752035241bd537743e8b42c5766a5dc3bc2 Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 23 Dec 2024 15:41:51 +0200 Subject: [PATCH 57/76] enriched test and fixed a bug that the enriched test revealed --- pkg/symbolicexpr/symbolicPath.go | 8 ++++---- pkg/symbolicexpr/symbolicexpr_test.go | 9 ++++++--- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index 2500f99..f3776ed 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -111,10 +111,10 @@ func computeAllowGivenAllowHigherDeny(allowPath, denyPath SymbolicPath) *Symboli resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: allowPath.Src, Dst: *allowPath.Dst.copy().add(&dstAtomNegate), Conn: allowPath.Conn}) } - if !denyPath.Conn.IsAll() { // Connection of deny path is not tautology - resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: allowPath.Src, Dst: allowPath.Dst, - Conn: allowPath.Conn.Subtract(denyPath.Conn)}) - } + } + if !denyPath.Conn.IsAll() { // Connection of deny path is not tautology + resAllowPaths = append(resAllowPaths, &SymbolicPath{Src: allowPath.Src, Dst: allowPath.Dst, + Conn: allowPath.Conn.Subtract(denyPath.Conn)}) } return resAllowPaths.removeEmpty().removeTautology() } diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index 7951c73..4939c1f 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -204,8 +204,10 @@ func TestComputeAllowGivenDenyThreeTermsEach(t *testing.T) { // allow symbolic path: // src: src: (*) dst: (*) // deny symbolic path: -// src: src: (s1` = str1` and s2` = str2` and s3` = str3`) dst: (s1` = str1` and s2` = str2` and s3` = str3`) +// src: src: (s1` = str1` and s2` = str2` and s3` = str3`) TCP +// dst: (s1` = str1` and s2` = str2` and s3` = str3`) // Output allow paths: +// src: (*) dst: (*) UDP and ICMP // src: (s1` != str1`) dst: (*) // src: (s2` != str2`) dst: (*) // src: (s3` != str3`) dst: (*) @@ -221,14 +223,15 @@ func TestComputeAllowGivenDenyAllowTautology(t *testing.T) { } tautologyConj := Conjunction{tautology{}} allowPath := SymbolicPath{Src: tautologyConj, Dst: tautologyConj, Conn: netset.AllTransports()} - denyPath := SymbolicPath{Src: conjDeny, Dst: conjDeny, Conn: netset.AllTransports()} + denyPath := SymbolicPath{Src: conjDeny, Dst: conjDeny, Conn: netset.AllUDPTransport()} fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.string(), denyPath.string()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) require.Equal(t, "All Connections from (s1` != str1`) to (*)\nAll Connections from (s2` != str2`) to (*)\n"+ "All Connections from (s3` != str3`) to (*)\nAll Connections from (*) to (s1` != str1`)\n"+ - "All Connections from (*) to (s2` != str2`)\nAll Connections from (*) to (s3` != str3`)", allowGivenDeny.String(), + "All Connections from (*) to (s2` != str2`)\nAll Connections from (*) to (s3` != str3`)\n"+ + "ICMP,TCP from (*) to (*)", allowGivenDeny.String(), "allowGivenDeny allow tautology computation not as expected") } From 5ae995eb979d8508505ec429f671fb32f7b2b1e5 Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 23 Dec 2024 15:43:01 +0200 Subject: [PATCH 58/76] 
enriched test and fixed a bug that the enriched test revealed --- pkg/symbolicexpr/symbolicexpr_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index 4939c1f..e96ae9f 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -204,10 +204,10 @@ func TestComputeAllowGivenDenyThreeTermsEach(t *testing.T) { // allow symbolic path: // src: src: (*) dst: (*) // deny symbolic path: -// src: src: (s1` = str1` and s2` = str2` and s3` = str3`) TCP +// src: src: (s1` = str1` and s2` = str2` and s3` = str3`) UDP // dst: (s1` = str1` and s2` = str2` and s3` = str3`) // Output allow paths: -// src: (*) dst: (*) UDP and ICMP +// src: (*) dst: (*) TCP and ICMP // src: (s1` != str1`) dst: (*) // src: (s2` != str2`) dst: (*) // src: (s3` != str3`) dst: (*) From 7f033c1dfb87e80e3bbac8cf059be4eca41ad6f0 Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 23 Dec 2024 15:51:22 +0200 Subject: [PATCH 59/76] enriched test --- pkg/symbolicexpr/symbolicexpr_test.go | 52 +++++++++++++-------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index e96ae9f..7ed25b0 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -261,21 +261,21 @@ func TestComputeAllowGivenDenyDenyTautology(t *testing.T) { // Input: // allow symbolic path: -// src: (tag = t0) dst: (tag = t1) +// src: (tag = t0) dst: (tag = t1) TCP // src: (tag = t2) dst: (tag = t3) // deny symbolic path: // src: (segment = s0) dst: (segment = s1) // src: (segment = s2) dst: (segment = s3) // src: (segment = s4) dst: (segment = s5) // Output allow paths: -// src: (tag = t0 and segment != s0 and segment != s2 and segment != s4) dst: (tag = t1) -// src: (tag = t0 and segment != s0 and segment != s2) dst: (tag = t1 and segment != s5) -// src: (tag = t0 and segment != s0 and segment != s4) dst: (tag = t1 and segment != s3) -// src: (tag = t0 and segment != s0) dst: (tag = t1 and segment != s3 and segment != s5) -// src: (tag = t0 and segment != s2 and segment != s4) dst: (tag = t1 and segment != s1) -// src: (tag = t0 and segment != s2) dst: (tag = t1 and segment != s1 and segment != s5) -// src: (tag = t0 and segment != s4) dst: (tag = t1 and segment != s1 and segment != s3) -// src: (tag = t0) dst: (tag = t1 and segment != s1 and segment != s3 and segment != s5) +// src: (tag = t0 and segment != s0 and segment != s2 and segment != s4) dst: (tag = t1) TCP +// src: (tag = t0 and segment != s0 and segment != s2) dst: (tag = t1 and segment != s5) TCP +// src: (tag = t0 and segment != s0 and segment != s4) dst: (tag = t1 and segment != s3) TCP +// src: (tag = t0 and segment != s0) dst: (tag = t1 and segment != s3 and segment != s5) TCP +// src: (tag = t0 and segment != s2 and segment != s4) dst: (tag = t1 and segment != s1) TCP +// src: (tag = t0 and segment != s2) dst: (tag = t1 and segment != s1 and segment != s5) TCP +// src: (tag = t0 and segment != s4) dst: (tag = t1 and segment != s1 and segment != s3) TCP +// src: (tag = t0) dst: (tag = t1 and segment != s1 and segment != s3 and segment != s5) TCP // src: (tag = t2 and segment != s0 and segment != s2 and segment != s4) dst: (tag = t3) // src: (tag = t2 and segment != s0 and segment != s2) dst: (tag = t3 and segment != s5) // src: (tag = t2 and segment != s0 and segment != s4) dst: (tag = t3 and segment != s3) @@ -294,7 +294,7 @@ 
func TestComputeAllowGivenDenies(t *testing.T) { atomicAllowDst := &atomicTerm{property: testTag, toVal: fmt.Sprintf("t%v", 2*i+1)} conjAllowSrc := Conjunction{atomicAllowSrc} conjAllowDst := Conjunction{atomicAllowDst} - allowPaths = append(allowPaths, &SymbolicPath{Src: conjAllowSrc, Dst: conjAllowDst, Conn: netset.AllTransports()}) + allowPaths = append(allowPaths, &SymbolicPath{Src: conjAllowSrc, Dst: conjAllowDst, Conn: netset.AllTCPTransport()}) } atomicDenySrc := &atomicTerm{property: testSegment, toVal: fmt.Sprintf("s%v", 2*i)} atomicDenyDst := &atomicTerm{property: testSegment, toVal: fmt.Sprintf("s%v", 2*i+1)} @@ -306,22 +306,22 @@ func TestComputeAllowGivenDenies(t *testing.T) { res := ComputeAllowGivenDenies(&allowPaths, &denyPaths) fmt.Printf("ComputeAllowGivenDenies:\n%v\n", res.String()) require.Equal(t, - "All Connections from (tag = t0 and segment != s0 and segment != s2 and segment != s4) to (tag = t1)\n"+ - "All Connections from (tag = t0 and segment != s0 and segment != s2) to (tag = t1 and segment != s5)\n"+ - "All Connections from (tag = t0 and segment != s0 and segment != s4) to (tag = t1 and segment != s3)\n"+ - "All Connections from (tag = t0 and segment != s0) to (tag = t1 and segment != s3 and segment != s5)\n"+ - "All Connections from (tag = t0 and segment != s2 and segment != s4) to (tag = t1 and segment != s1)\n"+ - "All Connections from (tag = t0 and segment != s2) to (tag = t1 and segment != s1 and segment != s5)\n"+ - "All Connections from (tag = t0 and segment != s4) to (tag = t1 and segment != s1 and segment != s3)\n"+ - "All Connections from (tag = t0) to (tag = t1 and segment != s1 and segment != s3 and segment != s5)\n"+ - "All Connections from (tag = t2 and segment != s0 and segment != s2 and segment != s4) to (tag = t3)\n"+ - "All Connections from (tag = t2 and segment != s0 and segment != s2) to (tag = t3 and segment != s5)\n"+ - "All Connections from (tag = t2 and segment != s0 and segment != s4) to (tag = t3 and segment != s3)\n"+ - "All Connections from (tag = t2 and segment != s0) to (tag = t3 and segment != s3 and segment != s5)\n"+ - "All Connections from (tag = t2 and segment != s2 and segment != s4) to (tag = t3 and segment != s1)\n"+ - "All Connections from (tag = t2 and segment != s2) to (tag = t3 and segment != s1 and segment != s5)\n"+ - "All Connections from (tag = t2 and segment != s4) to (tag = t3 and segment != s1 and segment != s3)\n"+ - "All Connections from (tag = t2) to (tag = t3 and segment != s1 and segment != s3 and segment != s5)", + "TCP from (tag = t0 and segment != s0 and segment != s2 and segment != s4) to (tag = t1)\n"+ + "TCP from (tag = t0 and segment != s0 and segment != s2) to (tag = t1 and segment != s5)\n"+ + "TCP from (tag = t0 and segment != s0 and segment != s4) to (tag = t1 and segment != s3)\n"+ + "TCP from (tag = t0 and segment != s0) to (tag = t1 and segment != s3 and segment != s5)\n"+ + "TCP from (tag = t0 and segment != s2 and segment != s4) to (tag = t1 and segment != s1)\n"+ + "TCP from (tag = t0 and segment != s2) to (tag = t1 and segment != s1 and segment != s5)\n"+ + "TCP from (tag = t0 and segment != s4) to (tag = t1 and segment != s1 and segment != s3)\n"+ + "TCP from (tag = t0) to (tag = t1 and segment != s1 and segment != s3 and segment != s5)\n"+ + "TCP from (tag = t2 and segment != s0 and segment != s2 and segment != s4) to (tag = t3)\n"+ + "TCP from (tag = t2 and segment != s0 and segment != s2) to (tag = t3 and segment != s5)\n"+ + "TCP from (tag = t2 and segment != s0 and segment != s4) to 
(tag = t3 and segment != s3)\n"+ + "TCP from (tag = t2 and segment != s0) to (tag = t3 and segment != s3 and segment != s5)\n"+ + "TCP from (tag = t2 and segment != s2 and segment != s4) to (tag = t3 and segment != s1)\n"+ + "TCP from (tag = t2 and segment != s2) to (tag = t3 and segment != s1 and segment != s5)\n"+ + "TCP from (tag = t2 and segment != s4) to (tag = t3 and segment != s1 and segment != s3)\n"+ + "TCP from (tag = t2) to (tag = t3 and segment != s1 and segment != s3 and segment != s5)", ComputeAllowGivenDenies(&allowPaths, &denyPaths).String(), "ComputeAllowGivenDenies computation not as expected") } From a73f2481b3585dbde99e9d813b1e4df438f76750 Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 23 Dec 2024 16:00:49 +0200 Subject: [PATCH 60/76] lint --- pkg/symbolicexpr/conjunction.go | 16 ---------------- pkg/symbolicexpr/symbolicPath.go | 4 ++-- pkg/symbolicexpr/symbolicexpr_test.go | 3 --- 3 files changed, 2 insertions(+), 21 deletions(-) diff --git a/pkg/symbolicexpr/conjunction.go b/pkg/symbolicexpr/conjunction.go index 78189e0..dd5a3e5 100644 --- a/pkg/symbolicexpr/conjunction.go +++ b/pkg/symbolicexpr/conjunction.go @@ -74,22 +74,6 @@ func (c *Conjunction) isEmptySet() bool { return false } -// checks whether conjunction other is implies by conjunction c -func (c *Conjunction) implies(other *Conjunction) bool { - if other.isTautology() { - return true - } - if len(*c) == 0 || len(*other) == 0 { - return false - } - for _, atomicTerm := range *other { - if !c.contains(atomicTerm) { - return false - } - } - return true -} - // checks whether conjunction other is disjoint to conjunction c // this is the case if there's a term in c and its contradiction in other // we will later add hints diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index f3776ed..6c04d1e 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -18,8 +18,8 @@ func (path *SymbolicPath) isEmpty() bool { // checks whether paths are disjoint. 
This is the case when one of the path's components (src, dst, conn) are disjoint func (path *SymbolicPath) disJointPaths(other *SymbolicPath) bool { - return (*path).Conn.Intersect((*other).Conn).IsEmpty() || (*path).Src.disjoint(&(*other).Src) || - (*path).Dst.disjoint(&(*other).Dst) + return path.Conn.Intersect((*other).Conn).IsEmpty() || path.Src.disjoint(&(*other).Src) || + path.Dst.disjoint(&(*other).Dst) } func (paths *SymbolicPaths) String() string { diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index 7ed25b0..2428eb6 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -108,7 +108,6 @@ func TestComputeAllowGivenDenySingleTermEach2(t *testing.T) { fmt.Printf("allowGivenDenyPaths is %v\n", allowGivenDenyPaths.String()) require.Equal(t, "UDP from (s1 = str1) to (d1 = str1)", allowGivenDenyPaths.String(), "ComputeAllowGivenDenies does not work as expected") - } // Input: @@ -134,7 +133,6 @@ func TestComputeAllowGivenDenySingleTermEach3(t *testing.T) { fmt.Printf("allowGivenDenyPaths is %v\n", allowGivenDenyPaths.String()) require.Equal(t, "TCP src-ports: 51-65535 from (s1 = str1) to (d1 = str1)", allowGivenDenyPaths.String(), "ComputeAllowGivenDenies does not work as expected") - } // Input: @@ -157,7 +155,6 @@ func TestComputeAllowGivenDenySingleTermEach4(t *testing.T) { fmt.Printf("allowGivenDenyPaths is %v\n", allowGivenDenyPaths.String()) require.Equal(t, "empty set ", allowGivenDenyPaths.String(), "ComputeAllowGivenDenies does not work as expected") - } // Input: From 93cb6e85947f3de5537dad0ec92cb8df201b6a64 Mon Sep 17 00:00:00 2001 From: shirim Date: Mon, 23 Dec 2024 16:05:21 +0200 Subject: [PATCH 61/76] lint --- pkg/symbolicexpr/symbolicPath.go | 4 ++-- pkg/symbolicexpr/symbolicexpr_test.go | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index 6c04d1e..867505f 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -18,8 +18,8 @@ func (path *SymbolicPath) isEmpty() bool { // checks whether paths are disjoint. 
This is the case when one of the path's components (src, dst, conn) are disjoint func (path *SymbolicPath) disJointPaths(other *SymbolicPath) bool { - return path.Conn.Intersect((*other).Conn).IsEmpty() || path.Src.disjoint(&(*other).Src) || - path.Dst.disjoint(&(*other).Dst) + return path.Conn.Intersect(other.Conn).IsEmpty() || path.Src.disjoint(&other.Src) || + path.Dst.disjoint(&other.Dst) } func (paths *SymbolicPaths) String() string { diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index 2428eb6..dfd17ca 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -2,11 +2,11 @@ package symbolicexpr import ( "fmt" - "github.com/np-guard/models/pkg/netp" "testing" "github.com/stretchr/testify/require" + "github.com/np-guard/models/pkg/netp" "github.com/np-guard/models/pkg/netset" ) From cff31489bdb5f178b7994c24fd2f134ceb268e55 Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 24 Dec 2024 10:16:00 +0200 Subject: [PATCH 62/76] block comment --- pkg/synthesis/synthesis.go | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go index 2f00486..44a9f38 100644 --- a/pkg/synthesis/synthesis.go +++ b/pkg/synthesis/synthesis.go @@ -17,10 +17,12 @@ func NSXSynthesis(recourses *collector.ResourcesContainerModel, params model.Out } config := parser.GetConfig() policy := preProcessing(config.Fw.CategoriesSpecs) + //computeAllowOnlyRulesForPolicy(policy) fmt.Println(policy.string()) return "", nil } +// todo: output should be (categoryToPolicy map[dfw.DfwCategory]*symbolicPolicy) // preProcessing: convert policy from spec to symbolicPolicy struct func preProcessing(categoriesSpecs []*dfw.CategorySpec) (policy symbolicPolicy) { policy = symbolicPolicy{} @@ -44,3 +46,22 @@ func convertRulesToSymbolicPaths(rules []*dfw.FwRule, category dfw.DfwCategory) } return res } + +/* +func computeAllowOnlyRulesForPolicy(policy *symbolicPolicy) { + computeAllowOnlyRulesForRules(&policy.inbound) + computeAllowOnlyRulesForRules(&policy.outbound) +} + +func computeAllowOnlyRulesForRules(inboundOrOutbound *[]*symbolicRule) { + for _, symbolicRule := range *inboundOrOutbound { + computeAllowOnlyFromRule(symbolicRule, nil, nil) + } +} + +// computes Allow only rules from rule, using the following alg: + +func computeAllowOnlyFromRule(symbolicRule *symbolicRule, globalDenies, categoryPasses []*symbolicRule) { + _, _, _ = symbolicRule, globalDenies, categoryPasses +} +*/ From 12ae417cd29ae7a85444e283fa3af491d4bad436 Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 24 Dec 2024 10:16:00 +0200 Subject: [PATCH 63/76] block comment --- pkg/synthesis/synthesis.go | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go index 2f00486..44a9f38 100644 --- a/pkg/synthesis/synthesis.go +++ b/pkg/synthesis/synthesis.go @@ -17,10 +17,12 @@ func NSXSynthesis(recourses *collector.ResourcesContainerModel, params model.Out } config := parser.GetConfig() policy := preProcessing(config.Fw.CategoriesSpecs) + //computeAllowOnlyRulesForPolicy(policy) fmt.Println(policy.string()) return "", nil } +// todo: output should be (categoryToPolicy map[dfw.DfwCategory]*symbolicPolicy) // preProcessing: convert policy from spec to symbolicPolicy struct func preProcessing(categoriesSpecs []*dfw.CategorySpec) (policy symbolicPolicy) { policy = symbolicPolicy{} @@ -44,3 +46,22 @@ func convertRulesToSymbolicPaths(rules []*dfw.FwRule, 
category dfw.DfwCategory) } return res } + +/* +func computeAllowOnlyRulesForPolicy(policy *symbolicPolicy) { + computeAllowOnlyRulesForRules(&policy.inbound) + computeAllowOnlyRulesForRules(&policy.outbound) +} + +func computeAllowOnlyRulesForRules(inboundOrOutbound *[]*symbolicRule) { + for _, symbolicRule := range *inboundOrOutbound { + computeAllowOnlyFromRule(symbolicRule, nil, nil) + } +} + +// computes Allow only rules from rule, using the following alg: + +func computeAllowOnlyFromRule(symbolicRule *symbolicRule, globalDenies, categoryPasses []*symbolicRule) { + _, _, _ = symbolicRule, globalDenies, categoryPasses +} +*/ From 6e91d6cb1193c0a0e1cf723fd58aeb347477e2f1 Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 24 Dec 2024 11:18:50 +0200 Subject: [PATCH 64/76] fixed flow - category is stll essential after preprocessing --- pkg/model/dfw/category.go | 6 +- pkg/model/dfw/dfw.go | 2 +- pkg/synthesis/model.go | 17 ---- pkg/synthesis/symbolicPolicy.go | 78 +++++++++++++++++++ pkg/synthesis/synthesis.go | 52 +------------ pkg/synthesis/synthesis_test.go | 7 +- .../ExampleDumbeldore.txt | 25 +++--- 7 files changed, 101 insertions(+), 86 deletions(-) create mode 100644 pkg/synthesis/symbolicPolicy.go diff --git a/pkg/model/dfw/category.go b/pkg/model/dfw/category.go index 3616670..ebb7e43 100644 --- a/pkg/model/dfw/category.go +++ b/pkg/model/dfw/category.go @@ -51,7 +51,7 @@ const ( } }*/ -func (d DfwCategory) string() string { +func (d DfwCategory) String() string { switch d { case ethernetCategory: return EthernetStr @@ -160,7 +160,7 @@ func (c *CategorySpec) string() string { for i := range c.rules { rulesStr[i+1] = c.rules[i].string() } - return fmt.Sprintf("category: %s\n%s\ndefault action: %s", c.Category.string(), + return fmt.Sprintf("category: %s\n%s\ndefault action: %s", c.Category.String(), strings.Join(rulesStr, lineSeparatorStr), string(c.defaultAction)) } @@ -199,7 +199,7 @@ func (c *CategorySpec) addRule(src, dst []*endpoints.VM, srcGroups, dstGroups, s scope: scope, ScopeGroups: scopeGroups, secPolicyName: secPolicyName, - secPolicyCategory: c.Category.string(), + secPolicyCategory: c.Category.String(), categoryRef: c, dfwRef: c.dfwRef, } diff --git a/pkg/model/dfw/dfw.go b/pkg/model/dfw/dfw.go index a076259..9220702 100644 --- a/pkg/model/dfw/dfw.go +++ b/pkg/model/dfw/dfw.go @@ -118,7 +118,7 @@ func (d *DFW) AddRule(src, dst []*endpoints.VM, srcGroups, dstGroups, scopeGroup isAllSrcGroups, isAllDstGroups bool, conn *netset.TransportSet, categoryStr, actionStr, direction string, ruleID int, origRule *collector.Rule, scope []*endpoints.VM, secPolicyName string, origDefaultRule *collector.FirewallRule) { for _, fwCategory := range d.CategoriesSpecs { - if fwCategory.Category.string() == categoryStr { + if fwCategory.Category.String() == categoryStr { fwCategory.addRule(src, dst, srcGroups, dstGroups, scopeGroups, isAllSrcGroups, isAllDstGroups, conn, actionStr, direction, ruleID, origRule, scope, secPolicyName, origDefaultRule) } diff --git a/pkg/synthesis/model.go b/pkg/synthesis/model.go index f0f16b0..8618781 100644 --- a/pkg/synthesis/model.go +++ b/pkg/synthesis/model.go @@ -1,9 +1,6 @@ package synthesis import ( - "fmt" - "strings" - "github.com/np-guard/vmware-analyzer/pkg/collector" "github.com/np-guard/vmware-analyzer/pkg/model/dfw" "github.com/np-guard/vmware-analyzer/pkg/model/endpoints" @@ -57,17 +54,3 @@ type Segments map[string]*collector.Segment // VMs map from VM name to the VM type VMs map[string]*endpoints.VM - -func (policy symbolicPolicy) string() 
string { - return "\nsymbolicInbound Rules:\n~~~~~~~~~~~~~~~~~~~~~~~\n" + strSymbolicRules(policy.inbound) + - "\nsymbolicOutbound Rules:\n~~~~~~~~~~~~~~~~~~~~~~~~~\n" + strSymbolicRules(policy.outbound) -} - -func strSymbolicRules(rules []*symbolicRule) string { - resStr := make([]string, len(rules)) - for i, rule := range rules { - resStr[i] = fmt.Sprintf("\tcategory: %v action: %v paths: %v", rule.origRuleCategory, rule.origRule.Action, - rule.origSymbolicPaths) - } - return strings.Join(resStr, "\n") -} diff --git a/pkg/synthesis/symbolicPolicy.go b/pkg/synthesis/symbolicPolicy.go new file mode 100644 index 0000000..264a5b3 --- /dev/null +++ b/pkg/synthesis/symbolicPolicy.go @@ -0,0 +1,78 @@ +package synthesis + +import ( + "fmt" + "github.com/np-guard/vmware-analyzer/pkg/model/dfw" + "github.com/np-guard/vmware-analyzer/pkg/symbolicexpr" + "strings" +) + +// preProcessing: convert policy from spec to symbolicPolicy struct +func preProcessing(categoriesSpecs []*dfw.CategorySpec) (categoryToPolicy map[dfw.DfwCategory]*symbolicPolicy) { + categoryToPolicy = map[dfw.DfwCategory]*symbolicPolicy{} + for _, category := range categoriesSpecs { + categoryPolicy := symbolicPolicy{} + if len(category.ProcessedRules.Outbound)+len(category.ProcessedRules.Inbound) == 0 { + continue + } + categoryPolicy.inbound = append(categoryPolicy.inbound, convertRulesToSymbolicPaths(category.ProcessedRules.Inbound, + category.Category)...) + categoryPolicy.outbound = append(categoryPolicy.outbound, convertRulesToSymbolicPaths(category.ProcessedRules.Outbound, + category.Category)...) + + categoryToPolicy[category.Category] = &categoryPolicy + } + return categoryToPolicy +} + +func convertRulesToSymbolicPaths(rules []*dfw.FwRule, category dfw.DfwCategory) []*symbolicRule { + res := make([]*symbolicRule, len(rules)) + for i, rule := range rules { + ruleSymbolicPaths := symbolicexpr.ConvertFWRuleToSymbolicPaths(rule) + res[i] = &symbolicRule{origRule: rule, origRuleCategory: category, origSymbolicPaths: ruleSymbolicPaths} + } + return res +} + +func (policy symbolicPolicy) string() string { + return fmt.Sprintf("symbolic inbound rules:\n%v\nsymbolic outbound rules:\n%v", strSymbolicRules(policy.inbound), + strSymbolicRules(policy.outbound)) +} + +func strSymbolicRules(rules []*symbolicRule) string { + resStr := make([]string, len(rules)) + for i, rule := range rules { + resStr[i] = fmt.Sprintf("\t%v. 
action: %v paths: %v", i, rule.origRule.Action, rule.origSymbolicPaths) + } + return strings.Join(resStr, "\n") +} + +func stringCategoryToSymbolicPolicy(categoryToPolicy map[dfw.DfwCategory]*symbolicPolicy) string { + res := []string{} + for category, policy := range categoryToPolicy { + if len(policy.inbound) > 0 || len(policy.outbound) > 0 { + res = append(res, fmt.Sprintf("category: %s\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n%v", + category.String(), policy.string())) + } + } + return strings.Join(res, "\n") +} + +/* +func computeAllowOnlyRulesForPolicy(policy *symbolicPolicy) { + computeAllowOnlyRulesForRules(&policy.inbound) + computeAllowOnlyRulesForRules(&policy.outbound) +} + +func computeAllowOnlyRulesForRules(inboundOrOutbound *[]*symbolicRule) { + for _, symbolicRule := range *inboundOrOutbound { + computeAllowOnlyFromRule(symbolicRule, nil, nil) + } +} + +// computes Allow only rules from rule, using the following alg: + +func computeAllowOnlyFromRule(symbolicRule *symbolicRule, globalDenies, categoryPasses []*symbolicRule) { + _, _, _ = symbolicRule, globalDenies, categoryPasses +} +*/ diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go index 44a9f38..70b89d1 100644 --- a/pkg/synthesis/synthesis.go +++ b/pkg/synthesis/synthesis.go @@ -5,63 +5,17 @@ import ( "github.com/np-guard/vmware-analyzer/pkg/collector" "github.com/np-guard/vmware-analyzer/pkg/model" - "github.com/np-guard/vmware-analyzer/pkg/model/dfw" - "github.com/np-guard/vmware-analyzer/pkg/symbolicexpr" ) func NSXSynthesis(recourses *collector.ResourcesContainerModel, params model.OutputParameters) (string, error) { + _ = params parser := model.NewNSXConfigParserFromResourcesContainer(recourses) err := parser.RunParser() if err != nil { return "", err } config := parser.GetConfig() - policy := preProcessing(config.Fw.CategoriesSpecs) - //computeAllowOnlyRulesForPolicy(policy) - fmt.Println(policy.string()) + categoryToPolicy := preProcessing(config.Fw.CategoriesSpecs) + fmt.Println(stringCategoryToSymbolicPolicy(categoryToPolicy)) return "", nil } - -// todo: output should be (categoryToPolicy map[dfw.DfwCategory]*symbolicPolicy) -// preProcessing: convert policy from spec to symbolicPolicy struct -func preProcessing(categoriesSpecs []*dfw.CategorySpec) (policy symbolicPolicy) { - policy = symbolicPolicy{} - for _, category := range categoriesSpecs { - if len(category.ProcessedRules.Outbound)+len(category.ProcessedRules.Inbound) == 0 { - continue - } - policy.inbound = append(policy.inbound, convertRulesToSymbolicPaths(category.ProcessedRules.Inbound, - category.Category)...) - policy.outbound = append(policy.outbound, convertRulesToSymbolicPaths(category.ProcessedRules.Outbound, - category.Category)...) 
- } - return policy -} - -func convertRulesToSymbolicPaths(rules []*dfw.FwRule, category dfw.DfwCategory) []*symbolicRule { - res := make([]*symbolicRule, len(rules)) - for i, rule := range rules { - ruleSymbolicPaths := symbolicexpr.ConvertFWRuleToSymbolicPaths(rule) - res[i] = &symbolicRule{origRule: rule, origRuleCategory: category, origSymbolicPaths: ruleSymbolicPaths} - } - return res -} - -/* -func computeAllowOnlyRulesForPolicy(policy *symbolicPolicy) { - computeAllowOnlyRulesForRules(&policy.inbound) - computeAllowOnlyRulesForRules(&policy.outbound) -} - -func computeAllowOnlyRulesForRules(inboundOrOutbound *[]*symbolicRule) { - for _, symbolicRule := range *inboundOrOutbound { - computeAllowOnlyFromRule(symbolicRule, nil, nil) - } -} - -// computes Allow only rules from rule, using the following alg: - -func computeAllowOnlyFromRule(symbolicRule *symbolicRule, globalDenies, categoryPasses []*symbolicRule) { - _, _, _ = symbolicRule, globalDenies, categoryPasses -} -*/ diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index 0df53cc..74e86ee 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -38,13 +38,14 @@ func (a *synthesisTest) runPreprocessing(t *testing.T) { err1 := parser.RunParser() require.Nil(t, err1) config := parser.GetConfig() - policy := preProcessing(config.Fw.CategoriesSpecs) - fmt.Println(policy.string()) + categoryToPolicy := preProcessing(config.Fw.CategoriesSpecs) + fmt.Println(stringCategoryToSymbolicPolicy(categoryToPolicy)) expectedOutputFileName := filepath.Join(getTestsDirOut(), a.name+".txt") expectedOutput, err2 := os.ReadFile(expectedOutputFileName) require.Nil(t, err2) expectedOutputStr := string(expectedOutput) - require.Equal(t, cleanStr(policy.string()), cleanStr(expectedOutputStr), "output not as expected") + require.Equal(t, cleanStr(stringCategoryToSymbolicPolicy(categoryToPolicy)), cleanStr(expectedOutputStr), + "output not as expected") } func TestPreprocessing(t *testing.T) { diff --git a/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt b/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt index 865373e..e5128b3 100644 --- a/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt +++ b/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt @@ -1,13 +1,12 @@ - -symbolicInbound Rules: -~~~~~~~~~~~~~~~~~~~~~~~ - category: 4 action: allow paths: (group = DumbledoreAll) to (*) - category: 4 action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) - category: 4 action: allow paths: (group = DumbledoreNoSly) to (*) - category: 4 action: deny paths: (*) to (*) -symbolicOutbound Rules: -~~~~~~~~~~~~~~~~~~~~~~~~~ - category: 4 action: allow paths: (group = DumbledoreAll) to (*) - category: 4 action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) - category: 4 action: allow paths: (group = DumbledoreNoSly) to (*) - category: 4 action: deny paths: (*) to (*) \ No newline at end of file +category: Application +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +symbolic inbound rules: + 0. action: allow paths: (group = DumbledoreAll) to (*) + 1. action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) + 2. action: allow paths: (group = DumbledoreNoSly) to (*) + 3. action: deny paths: (*) to (*) +symbolic outbound rules: + 0. action: allow paths: (group = DumbledoreAll) to (*) + 1. action: deny paths: (group = DumbledoreNoSly) to (group = Slytherin) + 2. action: allow paths: (group = DumbledoreNoSly) to (*) + 3. 
action: deny paths: (*) to (*) \ No newline at end of file From 607065e246c98a94852844e35cd7b09d78583bab Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 24 Dec 2024 11:20:46 +0200 Subject: [PATCH 65/76] lint --- pkg/synthesis/symbolicPolicy.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkg/synthesis/symbolicPolicy.go b/pkg/synthesis/symbolicPolicy.go index 264a5b3..e559d11 100644 --- a/pkg/synthesis/symbolicPolicy.go +++ b/pkg/synthesis/symbolicPolicy.go @@ -2,9 +2,10 @@ package synthesis import ( "fmt" + "strings" + "github.com/np-guard/vmware-analyzer/pkg/model/dfw" "github.com/np-guard/vmware-analyzer/pkg/symbolicexpr" - "strings" ) // preProcessing: convert policy from spec to symbolicPolicy struct From 832f977c17870e167d989256ea0bb160e603de92 Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 24 Dec 2024 14:52:58 +0200 Subject: [PATCH 66/76] code for policy conversion (not checked yet) --- pkg/model/dfw/category.go | 14 +++--- pkg/model/dfw/dfw.go | 8 ++-- pkg/model/dfw/rule.go | 36 +++++++------- pkg/synthesis/symbolicPolicy.go | 85 ++++++++++++++++++++++++++++----- pkg/synthesis/synthesis.go | 2 + 5 files changed, 104 insertions(+), 41 deletions(-) diff --git a/pkg/model/dfw/category.go b/pkg/model/dfw/category.go index a06369f..ee046b9 100644 --- a/pkg/model/dfw/category.go +++ b/pkg/model/dfw/category.go @@ -119,25 +119,25 @@ func (c *CategorySpec) analyzeCategory(src, dst *endpoints.VM, isIngress bool, for _, rule := range rules /*c.rules*/ { if rule.processedRuleCapturesPair(src, dst) /*rule.capturesPair(src, dst, isIngress)*/ { switch rule.Action { - case actionAllow: + case ActionAllow: addedAllowedConns := rule.Conn.Subtract(deniedConns).Subtract(jumpToAppConns) allowedConns = allowedConns.Union(addedAllowedConns) - case actionDeny: + case ActionDeny: addedDeniedConns := rule.Conn.Subtract(allowedConns).Subtract(jumpToAppConns) deniedConns = deniedConns.Union(addedDeniedConns) - case actionJumpToApp: + case ActionJumpToApp: addedJumpToAppConns := rule.Conn.Subtract(allowedConns).Subtract(deniedConns) jumpToAppConns = jumpToAppConns.Union(addedJumpToAppConns) } } } switch c.defaultAction { - case actionNone: // no default configured for this category + case ActionNone: // no default configured for this category nonDet = netset.AllTransports().Subtract(allowedConns).Subtract(deniedConns).Subtract(jumpToAppConns) - case actionAllow: // default allow + case ActionAllow: // default allow allowedConns = netset.AllTransports().Subtract(deniedConns).Subtract(jumpToAppConns) nonDet = netset.NoTransports() - case actionDeny: // default deny + case ActionDeny: // default deny deniedConns = netset.AllTransports().Subtract(allowedConns).Subtract(jumpToAppConns) nonDet = netset.NoTransports() default: @@ -218,7 +218,7 @@ func newEmptyCategory(c DfwCategory, d *DFW) *CategorySpec { return &CategorySpec{ Category: c, dfwRef: d, - defaultAction: actionNone, + defaultAction: ActionNone, ProcessedRules: &EffectiveRules{}, } } diff --git a/pkg/model/dfw/dfw.go b/pkg/model/dfw/dfw.go index 881f623..f9dee54 100644 --- a/pkg/model/dfw/dfw.go +++ b/pkg/model/dfw/dfw.go @@ -62,7 +62,7 @@ func (d *DFW) AllowedConnectionsIngressOrEgress(src, dst *endpoints.VM, isIngres allAllowedConns).Subtract(allDeniedConns) } - if d.defaultAction == actionAllow { + if d.defaultAction == ActionAllow { // if the last category has no default, use the "global" default (todo: check where this value is configured in the api) allAllowedConns = allAllowedConns.Union(allNotDeterminedConns) } @@ 
-151,10 +151,10 @@ func (d *DFW) AddRule(src, dst []*endpoints.VM, srcGroups, dstGroups, scopeGroup // NewEmptyDFW returns new DFW with global default as from input func NewEmptyDFW(globalDefaultAllow bool) *DFW { res := &DFW{ - defaultAction: actionDeny, + defaultAction: ActionDeny, } if globalDefaultAllow { - res.defaultAction = actionAllow + res.defaultAction = ActionAllow } for _, c := range categoriesList { res.CategoriesSpecs = append(res.CategoriesSpecs, newEmptyCategory(c, res)) @@ -163,7 +163,7 @@ func NewEmptyDFW(globalDefaultAllow bool) *DFW { } func (d *DFW) GlobalDefaultAllow() bool { - return d.defaultAction == actionAllow + return d.defaultAction == ActionAllow } func (d *DFW) SetPathsToDisplayNames(m map[string]string) { diff --git a/pkg/model/dfw/rule.go b/pkg/model/dfw/rule.go index 03b2c38..e644886 100644 --- a/pkg/model/dfw/rule.go +++ b/pkg/model/dfw/rule.go @@ -24,34 +24,34 @@ const ( var ingressDirections = []string{"IN", "IN_OUT"}*/ const ( - actionAllow RuleAction = "allow" - actionDeny RuleAction = "deny" // currently not differentiating between "reject" and "drop" - actionJumpToApp RuleAction = "jump_to_application" - actionNone RuleAction = "none" // to mark that a default rule is not configured + ActionAllow RuleAction = "allow" + ActionDeny RuleAction = "deny" // currently not differentiating between "reject" and "drop" + ActionJumpToApp RuleAction = "jump_to_application" + ActionNone RuleAction = "none" // to mark that a default rule is not configured ) /*func actionFromString(input string) RuleAction { switch input { - case string(actionAllow): - return actionAllow - case string(actionDeny): - return actionDeny - case string(actionJumpToApp): - return actionJumpToApp + case string(ActionAllow): + return ActionAllow + case string(ActionDeny): + return ActionDeny + case string(ActionJumpToApp): + return ActionJumpToApp } - return actionDeny + return ActionDeny }*/ func actionFromString(s string) RuleAction { switch strings.ToLower(s) { - case string(actionAllow): - return actionAllow - case string(actionDeny), "reject", "drop": // TODO: change - return actionDeny - case string(actionJumpToApp): - return actionJumpToApp + case string(ActionAllow): + return ActionAllow + case string(ActionDeny), "reject", "drop": // TODO: change + return ActionDeny + case string(ActionJumpToApp): + return ActionJumpToApp default: - return actionNone + return ActionNone } } diff --git a/pkg/synthesis/symbolicPolicy.go b/pkg/synthesis/symbolicPolicy.go index e559d11..4d51159 100644 --- a/pkg/synthesis/symbolicPolicy.go +++ b/pkg/synthesis/symbolicPolicy.go @@ -8,6 +8,12 @@ import ( "github.com/np-guard/vmware-analyzer/pkg/symbolicexpr" ) +///////////////////////////////////////////////////////////////////////////////////// +// preprocessing related functionality +///////////////////////////////////////////////////////////////////////////////////// + +// todo: one category may have few instances in the original NSX policy; need to chain these to one + // preProcessing: convert policy from spec to symbolicPolicy struct func preProcessing(categoriesSpecs []*dfw.CategorySpec) (categoryToPolicy map[dfw.DfwCategory]*symbolicPolicy) { categoryToPolicy = map[dfw.DfwCategory]*symbolicPolicy{} @@ -59,21 +65,76 @@ func stringCategoryToSymbolicPolicy(categoryToPolicy map[dfw.DfwCategory]*symbol return strings.Join(res, "\n") } -/* -func computeAllowOnlyRulesForPolicy(policy *symbolicPolicy) { - computeAllowOnlyRulesForRules(&policy.inbound) - computeAllowOnlyRulesForRules(&policy.outbound) -} 
+///////////////////////////////////////////////////////////////////////////////////// +// convert symbolic rules to allow only functionality +///////////////////////////////////////////////////////////////////////////////////// -func computeAllowOnlyRulesForRules(inboundOrOutbound *[]*symbolicRule) { - for _, symbolicRule := range *inboundOrOutbound { - computeAllowOnlyFromRule(symbolicRule, nil, nil) +func computeAllowOnlyRulesForPolicy(categoriesSpecs []*dfw.CategorySpec, + categoryToPolicy map[dfw.DfwCategory]*symbolicPolicy) symbolicPolicy { + allowOnlyPolicy := symbolicPolicy{} + globalInboundDenies, globalOutboundDenies := symbolicexpr.SymbolicPaths{}, symbolicexpr.SymbolicPaths{} + // we go over categoriesSpecs to make sure we follow the correct order of categories + for _, category := range categoriesSpecs { + thisCategoryPolicy := categoryToPolicy[category.Category] + inboundAllow, outboundAllow := computeAllowOnlyRulesForCategory(thisCategoryPolicy, &globalInboundDenies, + &globalOutboundDenies) + allowOnlyPolicy.inbound = append(allowOnlyPolicy.inbound, inboundAllow...) + allowOnlyPolicy.outbound = append(allowOnlyPolicy.inbound, outboundAllow...) + // todo: handle default rule } + + return allowOnlyPolicy } -// computes Allow only rules from rule, using the following alg: +func computeAllowOnlyRulesForCategory(policy *symbolicPolicy, globalInboundDenied, + globalOutboundDenies *symbolicexpr.SymbolicPaths) (inboundAllowOnly, outboundAllowOnly []*symbolicRule) { + inboundAllow, inboundDeny := computeAllowOnlyForCategory(&policy.inbound, globalInboundDenied) + outboundAllow, outboundDeny := computeAllowOnlyForCategory(&policy.outbound, globalOutboundDenies) + inboundAllowOnly = append(inboundAllowOnly, inboundAllow...) + outboundAllowOnly = append(outboundAllowOnly, outboundAllow...) + globalInboundDenied = inboundDeny + globalOutboundDenies = outboundDeny + return +} -func computeAllowOnlyFromRule(symbolicRule *symbolicRule, globalDenies, categoryPasses []*symbolicRule) { - _, _, _ = symbolicRule, globalDenies, categoryPasses +// computes allow only rules, using the following algorithm: +// For each category, in order: +// Initialization: +// +// category_passes = empty set +// +// For each rule, in order +// +// case pass: +// category_passes = category_passes or rule +// case deny: +// new_denies = merge(category_passes, deny_rule) +// global_denies = global_denies union new_denies +// case allow: +// new_allow = merge(global_denies or category_passes, allow_rule) +// global_allows = global_allows or new_allows +// Output: global_allows +func computeAllowOnlyForCategory(inboundOrOutbound *[]*symbolicRule, + globalDenies *symbolicexpr.SymbolicPaths) (allowRule []*symbolicRule, denyPaths *symbolicexpr.SymbolicPaths) { + allowOnlyRules := []*symbolicRule{} + categoryPasses := symbolicexpr.SymbolicPaths{} + newGlobalDenies := symbolicexpr.SymbolicPaths{} + copy(newGlobalDenies, *globalDenies) + for _, rule := range *inboundOrOutbound { + symbolicDeniesAndPasses := symbolicexpr.SymbolicPaths{} + switch rule.origRule.Action { + case dfw.ActionJumpToApp: + categoryPasses = append(categoryPasses, *rule.origSymbolicPaths...) + case dfw.ActionDeny: + newSymbolicPaths := symbolicexpr.ComputeAllowGivenDenies(rule.origSymbolicPaths, &categoryPasses) + newGlobalDenies = append(newGlobalDenies, *newSymbolicPaths...) + case dfw.ActionAllow: + symbolicDeniesAndPasses = append(symbolicDeniesAndPasses, categoryPasses...) 
+ newSymbolicPaths := symbolicexpr.ComputeAllowGivenDenies(rule.origSymbolicPaths, &symbolicDeniesAndPasses) + newRule := &symbolicRule{origRule: rule.origRule, origRuleCategory: rule.origRuleCategory, + origSymbolicPaths: rule.origSymbolicPaths, allowOnlyRulePaths: *newSymbolicPaths} + allowOnlyRules = append(allowOnlyRules, newRule) + } + } + return allowOnlyRules, &newGlobalDenies } -*/ diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go index 70b89d1..d06bc2d 100644 --- a/pkg/synthesis/synthesis.go +++ b/pkg/synthesis/synthesis.go @@ -17,5 +17,7 @@ func NSXSynthesis(recourses *collector.ResourcesContainerModel, params model.Out config := parser.GetConfig() categoryToPolicy := preProcessing(config.Fw.CategoriesSpecs) fmt.Println(stringCategoryToSymbolicPolicy(categoryToPolicy)) + allowOnlyPolicy := computeAllowOnlyRulesForPolicy(config.Fw.CategoriesSpecs, categoryToPolicy) + _ = allowOnlyPolicy return "", nil } From ee391c0eced39d7574474b2a722cc5f5ef1ca6f5 Mon Sep 17 00:00:00 2001 From: shirim Date: Tue, 24 Dec 2024 17:00:24 +0200 Subject: [PATCH 67/76] added nil checks runs without dump not verified --- pkg/synthesis/symbolicPolicy.go | 26 +++++++++++++++++--------- pkg/synthesis/synthesis.go | 3 +-- pkg/synthesis/synthesis_test.go | 14 ++++++++++++++ 3 files changed, 32 insertions(+), 11 deletions(-) diff --git a/pkg/synthesis/symbolicPolicy.go b/pkg/synthesis/symbolicPolicy.go index 4d51159..51147e0 100644 --- a/pkg/synthesis/symbolicPolicy.go +++ b/pkg/synthesis/symbolicPolicy.go @@ -76,8 +76,8 @@ func computeAllowOnlyRulesForPolicy(categoriesSpecs []*dfw.CategorySpec, // we go over categoriesSpecs to make sure we follow the correct order of categories for _, category := range categoriesSpecs { thisCategoryPolicy := categoryToPolicy[category.Category] - inboundAllow, outboundAllow := computeAllowOnlyRulesForCategory(thisCategoryPolicy, &globalInboundDenies, - &globalOutboundDenies) + inboundAllow, outboundAllow := computeAllowOnlyRulesForCategory(thisCategoryPolicy, + &globalInboundDenies, &globalOutboundDenies) allowOnlyPolicy.inbound = append(allowOnlyPolicy.inbound, inboundAllow...) allowOnlyPolicy.outbound = append(allowOnlyPolicy.inbound, outboundAllow...) // todo: handle default rule @@ -86,14 +86,22 @@ func computeAllowOnlyRulesForPolicy(categoriesSpecs []*dfw.CategorySpec, return allowOnlyPolicy } -func computeAllowOnlyRulesForCategory(policy *symbolicPolicy, globalInboundDenied, +func computeAllowOnlyRulesForCategory(policy *symbolicPolicy, globalInboundDenies, globalOutboundDenies *symbolicexpr.SymbolicPaths) (inboundAllowOnly, outboundAllowOnly []*symbolicRule) { - inboundAllow, inboundDeny := computeAllowOnlyForCategory(&policy.inbound, globalInboundDenied) - outboundAllow, outboundDeny := computeAllowOnlyForCategory(&policy.outbound, globalOutboundDenies) - inboundAllowOnly = append(inboundAllowOnly, inboundAllow...) - outboundAllowOnly = append(outboundAllowOnly, outboundAllow...) - globalInboundDenied = inboundDeny - globalOutboundDenies = outboundDeny + if policy == nil { + return + } + // todo: common code into func + if policy.inbound != nil { + inboundAllow, inboundDeny := computeAllowOnlyForCategory(&policy.inbound, globalInboundDenies) + inboundAllowOnly = append(inboundAllowOnly, inboundAllow...) + *globalInboundDenies = append(*globalInboundDenies, *inboundDeny...) 
+ } + if policy.outbound != nil { + outboundAllow, outboundDeny := computeAllowOnlyForCategory(&policy.outbound, globalOutboundDenies) + outboundAllowOnly = append(outboundAllowOnly, outboundAllow...) + *globalOutboundDenies = append(*globalOutboundDenies, *outboundDeny...) + } return } diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go index 335d091..8450a29 100644 --- a/pkg/synthesis/synthesis.go +++ b/pkg/synthesis/synthesis.go @@ -7,8 +7,7 @@ import ( "github.com/np-guard/vmware-analyzer/pkg/model" ) -func NSXToAbstractModelSynthesis(recourses *collector.ResourcesContainerModel, params model.OutputParameters) (string, error) { - _ = params +func NSXToAbstractModelSynthesis(recourses *collector.ResourcesContainerModel) (string, error) { parser := model.NewNSXConfigParserFromResourcesContainer(recourses) err := parser.RunParser() if err != nil { diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index 74e86ee..ac5d3fb 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -32,6 +32,7 @@ var allTests = []synthesisTest{ }, } +// todo: extract common code and use in runSynthesis func (a *synthesisTest) runPreprocessing(t *testing.T) { rc := data.ExamplesGeneration(a.exData) parser := model.NewNSXConfigParserFromResourcesContainer(rc) @@ -56,6 +57,19 @@ func TestPreprocessing(t *testing.T) { } } +func (a *synthesisTest) runSynthesis(t *testing.T) { + rc := data.ExamplesGeneration(a.exData) + NSXToAbstractModelSynthesis(rc) +} + +func TestSynthesis(t *testing.T) { + logging.Init(logging.HighVerbosity) + for i := range allTests { + test := &allTests[i] + test.runSynthesis(t) + } +} + // getTestsDirOut returns the path to the dir where test output files are located func getTestsDirOut() string { currentDir, _ := os.Getwd() From fc2d273658e3006e21b0526a5761b04219390a91 Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 25 Dec 2024 11:12:52 +0200 Subject: [PATCH 68/76] minor refactoring; added printing functionality and temporarily testing prints --- pkg/symbolicexpr/symbolicPath.go | 4 +- pkg/symbolicexpr/symbolicexpr_test.go | 20 ++++---- .../{symbolicPolicy.go => symbolicRule.go} | 48 +++++++++++++------ 3 files changed, 45 insertions(+), 27 deletions(-) rename pkg/synthesis/{symbolicPolicy.go => symbolicRule.go} (81%) diff --git a/pkg/symbolicexpr/symbolicPath.go b/pkg/symbolicexpr/symbolicPath.go index 867505f..33f7dfa 100644 --- a/pkg/symbolicexpr/symbolicPath.go +++ b/pkg/symbolicexpr/symbolicPath.go @@ -7,7 +7,7 @@ import ( "github.com/np-guard/vmware-analyzer/pkg/model/dfw" ) -func (path *SymbolicPath) string() string { +func (path *SymbolicPath) String() string { return path.Conn.String() + " from " + path.Src.string() + " to " + path.Dst.string() } @@ -28,7 +28,7 @@ func (paths *SymbolicPaths) String() string { } res := make([]string, len(*paths)) for i, path := range *paths { - res[i] = path.string() + res[i] = path.String() } return strings.Join(res, "\n") } diff --git a/pkg/symbolicexpr/symbolicexpr_test.go b/pkg/symbolicexpr/symbolicexpr_test.go index dfd17ca..68c76e4 100644 --- a/pkg/symbolicexpr/symbolicexpr_test.go +++ b/pkg/symbolicexpr/symbolicexpr_test.go @@ -32,9 +32,9 @@ func TestSymbolicPaths(t *testing.T) { conjDst = *conjDst.add(&negateAtomic) } conjSymbolicPath := SymbolicPath{Src: conjSrc, Dst: conjDst, Conn: netset.AllTCPTransport()} - fmt.Printf("\nconjSymbolicPath:\n%v\n", conjSymbolicPath.string()) + fmt.Printf("\nconjSymbolicPath:\n%v\n", conjSymbolicPath.String()) require.Equal(t, "TCP 
from (t1 = str1 and t2 = str2 and t3 = str3) to (t1 != str1 and t2 != str2 and t3 != str3)", - conjSymbolicPath.string(), "conjSymbolicPath not as expected") + conjSymbolicPath.String(), "conjSymbolicPath not as expected") println("conjEmpty", conjEmpty.string()) require.Equal(t, emptySet, conjEmpty.string(), "empty conjunction not as expected") } @@ -64,7 +64,7 @@ func TestComputeAllowGivenDenySingleTermEach1(t *testing.T) { conjDst2 = *conjDst2.add(atomicDst2) allowPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.AllTransports()} denyPath := SymbolicPath{Src: conjSrc2, Dst: conjDst2, Conn: netset.AllUDPTransport()} - fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.string(), denyPath.string()) + fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.String(), denyPath.String()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) require.Equal(t, "All Connections from (s1 = str1 and s2 != str2) to (d1 = str1)\n"+ @@ -96,7 +96,7 @@ func TestComputeAllowGivenDenySingleTermEach2(t *testing.T) { conjDst2 = *conjDst2.add(atomicDst2) allowPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.AllUDPTransport()} denyPath := SymbolicPath{Src: conjSrc2, Dst: conjDst2, Conn: netset.AllTCPTransport()} - fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.string(), denyPath.string()) + fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.String(), denyPath.String()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) // computeAllowGivenAllowHigherDeny not optimized @@ -128,7 +128,7 @@ func TestComputeAllowGivenDenySingleTermEach3(t *testing.T) { allowPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.AllTCPTransport()} denyPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.NewTCPTransport(0, 50, netp.MinPort, netp.MaxPort)} - fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.string(), denyPath.string()) + fmt.Printf("allowPath is %v\ndenyPath is %v\n", allowPath.String(), denyPath.String()) allowGivenDenyPaths := *ComputeAllowGivenDenies(&SymbolicPaths{&allowPath}, &SymbolicPaths{&denyPath}) fmt.Printf("allowGivenDenyPaths is %v\n", allowGivenDenyPaths.String()) require.Equal(t, "TCP src-ports: 51-65535 from (s1 = str1) to (d1 = str1)", allowGivenDenyPaths.String(), @@ -150,7 +150,7 @@ func TestComputeAllowGivenDenySingleTermEach4(t *testing.T) { atomicDst1 := &atomicTerm{property: testDst1, toVal: "str1"} conjDst1 = *conjDst1.add(atomicDst1) path := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.AllTCPTransport()} - fmt.Printf("allowPath is %v\ndenyPath is %v\n", path.string(), path.string()) + fmt.Printf("allowPath is %v\ndenyPath is %v\n", path.String(), path.String()) allowGivenDenyPaths := *ComputeAllowGivenDenies(&SymbolicPaths{&path}, &SymbolicPaths{&path}) fmt.Printf("allowGivenDenyPaths is %v\n", allowGivenDenyPaths.String()) require.Equal(t, "empty set ", allowGivenDenyPaths.String(), @@ -185,7 +185,7 @@ func TestComputeAllowGivenDenyThreeTermsEach(t *testing.T) { denyPathNoEffect := SymbolicPath{Src: conjDeny, Dst: conjDeny, Conn: netset.AllUDPTransport()} allowGivenDenyPaths := *ComputeAllowGivenDenies(&SymbolicPaths{&allowPath}, &SymbolicPaths{&denyPath, &denyPathNoEffect}) - fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.string(), 
denyPath.string()) + fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.String(), denyPath.String()) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDenyPaths.String()) require.Equal(t, "TCP from (s1 = str1 and s2 = str2 and s3 = str3 and s1` != str1`) to (s1 = str1 and s2 = str2 and s3 = str3)\n"+ @@ -221,7 +221,7 @@ func TestComputeAllowGivenDenyAllowTautology(t *testing.T) { tautologyConj := Conjunction{tautology{}} allowPath := SymbolicPath{Src: tautologyConj, Dst: tautologyConj, Conn: netset.AllTransports()} denyPath := SymbolicPath{Src: conjDeny, Dst: conjDeny, Conn: netset.AllUDPTransport()} - fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.string(), denyPath.string()) + fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.String(), denyPath.String()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) require.Equal(t, @@ -249,7 +249,7 @@ func TestComputeAllowGivenDenyDenyTautology(t *testing.T) { tautologyConj := Conjunction{tautology{}} allowPath := SymbolicPath{Src: conjAllow, Dst: conjAllow, Conn: netset.AllTransports()} denyPath := SymbolicPath{Src: tautologyConj, Dst: tautologyConj, Conn: netset.AllTransports()} - fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.string(), denyPath.string()) + fmt.Printf("symbolicAllow is %s\nsymbolicDeny is %s\n", allowPath.String(), denyPath.String()) allowGivenDeny := *computeAllowGivenAllowHigherDeny(allowPath, denyPath) fmt.Printf("computeAllowGivenAllowHigherDeny(allowPath, denyPath) is\n%v\n", allowGivenDeny.String()) require.Equal(t, emptySet, allowGivenDeny.String(), @@ -341,7 +341,7 @@ func TestAllowDenyOptimizeEmptyPath(t *testing.T) { denyPath := SymbolicPath{Src: conjSrc1, Dst: conjDst1, Conn: netset.AllTransports()} allowWithDeny := ComputeAllowGivenDenies(&SymbolicPaths{&allowPath}, &SymbolicPaths{&denyPath}) fmt.Printf("allow path: %v with higher priority deny path:%v is:\n%v\n\n", - allowPath.string(), denyPath.string(), allowWithDeny.String()) + allowPath.String(), denyPath.String(), allowWithDeny.String()) require.Equal(t, "All Connections from (s1 = str1) to (d1 != str1)", allowWithDeny.String(), "optimized with deny not working properly") } diff --git a/pkg/synthesis/symbolicPolicy.go b/pkg/synthesis/symbolicRule.go similarity index 81% rename from pkg/synthesis/symbolicPolicy.go rename to pkg/synthesis/symbolicRule.go index 51147e0..406ff59 100644 --- a/pkg/synthesis/symbolicPolicy.go +++ b/pkg/synthesis/symbolicRule.go @@ -76,6 +76,9 @@ func computeAllowOnlyRulesForPolicy(categoriesSpecs []*dfw.CategorySpec, // we go over categoriesSpecs to make sure we follow the correct order of categories for _, category := range categoriesSpecs { thisCategoryPolicy := categoryToPolicy[category.Category] + if thisCategoryPolicy == nil { + continue + } inboundAllow, outboundAllow := computeAllowOnlyRulesForCategory(thisCategoryPolicy, &globalInboundDenies, &globalOutboundDenies) allowOnlyPolicy.inbound = append(allowOnlyPolicy.inbound, inboundAllow...) 
@@ -86,25 +89,24 @@ func computeAllowOnlyRulesForPolicy(categoriesSpecs []*dfw.CategorySpec, return allowOnlyPolicy } -func computeAllowOnlyRulesForCategory(policy *symbolicPolicy, globalInboundDenies, +// gets here only if policy is not nil +func computeAllowOnlyRulesForCategory(originalPolicy *symbolicPolicy, globalInboundDenies, globalOutboundDenies *symbolicexpr.SymbolicPaths) (inboundAllowOnly, outboundAllowOnly []*symbolicRule) { - if policy == nil { - return - } - // todo: common code into func - if policy.inbound != nil { - inboundAllow, inboundDeny := computeAllowOnlyForCategory(&policy.inbound, globalInboundDenies) - inboundAllowOnly = append(inboundAllowOnly, inboundAllow...) - *globalInboundDenies = append(*globalInboundDenies, *inboundDeny...) - } - if policy.outbound != nil { - outboundAllow, outboundDeny := computeAllowOnlyForCategory(&policy.outbound, globalOutboundDenies) - outboundAllowOnly = append(outboundAllowOnly, outboundAllow...) - *globalOutboundDenies = append(*globalOutboundDenies, *outboundDeny...) - } + inboundAllowOnly = computeAllowOnlyInboundOrOutbound(originalPolicy.inbound, globalInboundDenies) + outboundAllowOnly = computeAllowOnlyInboundOrOutbound(originalPolicy.outbound, globalOutboundDenies) return } +func computeAllowOnlyInboundOrOutbound(originalRules []*symbolicRule, globalDenies *symbolicexpr.SymbolicPaths) []*symbolicRule { + if originalRules == nil { + return nil + } + newAllows, newDenies := computeAllowOnlyForCategory(&originalRules, globalDenies) + *globalDenies = append(*globalDenies, *newDenies...) + return newAllows + +} + // computes allow only rules, using the following algorithm: // For each category, in order: // Initialization: @@ -129,6 +131,7 @@ func computeAllowOnlyForCategory(inboundOrOutbound *[]*symbolicRule, newGlobalDenies := symbolicexpr.SymbolicPaths{} copy(newGlobalDenies, *globalDenies) for _, rule := range *inboundOrOutbound { + fmt.Printf("rule action: %v symbolicPaths %v\n", rule.origRule.Action, rule.origSymbolicPaths.String()) symbolicDeniesAndPasses := symbolicexpr.SymbolicPaths{} switch rule.origRule.Action { case dfw.ActionJumpToApp: @@ -136,13 +139,28 @@ func computeAllowOnlyForCategory(inboundOrOutbound *[]*symbolicRule, case dfw.ActionDeny: newSymbolicPaths := symbolicexpr.ComputeAllowGivenDenies(rule.origSymbolicPaths, &categoryPasses) newGlobalDenies = append(newGlobalDenies, *newSymbolicPaths...) + fmt.Printf("newGlobalDenies is %v\n", newGlobalDenies.String()) case dfw.ActionAllow: symbolicDeniesAndPasses = append(symbolicDeniesAndPasses, categoryPasses...) 
newSymbolicPaths := symbolicexpr.ComputeAllowGivenDenies(rule.origSymbolicPaths, &symbolicDeniesAndPasses) newRule := &symbolicRule{origRule: rule.origRule, origRuleCategory: rule.origRuleCategory, origSymbolicPaths: rule.origSymbolicPaths, allowOnlyRulePaths: *newSymbolicPaths} allowOnlyRules = append(allowOnlyRules, newRule) + fmt.Printf("allowOnlyRules is %v\n", strAllowOnlyPathsOfRules(allowOnlyRules)) } } return allowOnlyRules, &newGlobalDenies } + +func strAllowOnlyPathsOfRules(rules []*symbolicRule) string { + res := []string{} + for _, rule := range rules { + if rule.allowOnlyRulePaths == nil { + continue + } + for _, path := range rule.allowOnlyRulePaths { + res = append(res, path.String()) + } + } + return strings.Join(res, "\n") +} From c3b3ff4b9647dce64a63ea892a406393e27c690d Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 25 Dec 2024 13:57:19 +0200 Subject: [PATCH 69/76] typo fix, add prints and test --- pkg/synthesis/symbolicRule.go | 12 ++++++++---- pkg/synthesis/synthesis.go | 7 +++---- pkg/synthesis/synthesis_test.go | 4 +++- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/pkg/synthesis/symbolicRule.go b/pkg/synthesis/symbolicRule.go index 406ff59..75f22da 100644 --- a/pkg/synthesis/symbolicRule.go +++ b/pkg/synthesis/symbolicRule.go @@ -82,7 +82,7 @@ func computeAllowOnlyRulesForPolicy(categoriesSpecs []*dfw.CategorySpec, inboundAllow, outboundAllow := computeAllowOnlyRulesForCategory(thisCategoryPolicy, &globalInboundDenies, &globalOutboundDenies) allowOnlyPolicy.inbound = append(allowOnlyPolicy.inbound, inboundAllow...) - allowOnlyPolicy.outbound = append(allowOnlyPolicy.inbound, outboundAllow...) + allowOnlyPolicy.outbound = append(allowOnlyPolicy.outbound, outboundAllow...) // todo: handle default rule } @@ -131,7 +131,6 @@ func computeAllowOnlyForCategory(inboundOrOutbound *[]*symbolicRule, newGlobalDenies := symbolicexpr.SymbolicPaths{} copy(newGlobalDenies, *globalDenies) for _, rule := range *inboundOrOutbound { - fmt.Printf("rule action: %v symbolicPaths %v\n", rule.origRule.Action, rule.origSymbolicPaths.String()) symbolicDeniesAndPasses := symbolicexpr.SymbolicPaths{} switch rule.origRule.Action { case dfw.ActionJumpToApp: @@ -139,19 +138,24 @@ func computeAllowOnlyForCategory(inboundOrOutbound *[]*symbolicRule, case dfw.ActionDeny: newSymbolicPaths := symbolicexpr.ComputeAllowGivenDenies(rule.origSymbolicPaths, &categoryPasses) newGlobalDenies = append(newGlobalDenies, *newSymbolicPaths...) - fmt.Printf("newGlobalDenies is %v\n", newGlobalDenies.String()) case dfw.ActionAllow: symbolicDeniesAndPasses = append(symbolicDeniesAndPasses, categoryPasses...) 
newSymbolicPaths := symbolicexpr.ComputeAllowGivenDenies(rule.origSymbolicPaths, &symbolicDeniesAndPasses) newRule := &symbolicRule{origRule: rule.origRule, origRuleCategory: rule.origRuleCategory, origSymbolicPaths: rule.origSymbolicPaths, allowOnlyRulePaths: *newSymbolicPaths} allowOnlyRules = append(allowOnlyRules, newRule) - fmt.Printf("allowOnlyRules is %v\n", strAllowOnlyPathsOfRules(allowOnlyRules)) } } return allowOnlyRules, &newGlobalDenies } +func strAllowOnlyPolicy(policy *symbolicPolicy) string { + return fmt.Sprintf("\ninbound allow only rules\n~~~~~~~~~~~~~~~~~~~~~~~~~\n") + + strAllowOnlyPathsOfRules(policy.inbound) + + fmt.Sprintf("\noutbound allow only rules\n~~~~~~~~~~~~~~~~~~~~~~~~~\n") + + strAllowOnlyPathsOfRules(policy.outbound) +} + func strAllowOnlyPathsOfRules(rules []*symbolicRule) string { res := []string{} for _, rule := range rules { diff --git a/pkg/synthesis/synthesis.go b/pkg/synthesis/synthesis.go index 8450a29..9f3d151 100644 --- a/pkg/synthesis/synthesis.go +++ b/pkg/synthesis/synthesis.go @@ -7,16 +7,15 @@ import ( "github.com/np-guard/vmware-analyzer/pkg/model" ) -func NSXToAbstractModelSynthesis(recourses *collector.ResourcesContainerModel) (string, error) { +func NSXToAbstractModelSynthesis(recourses *collector.ResourcesContainerModel) (*symbolicPolicy, error) { parser := model.NewNSXConfigParserFromResourcesContainer(recourses) err := parser.RunParser() if err != nil { - return "", err + return nil, err } config := parser.GetConfig() categoryToPolicy := preProcessing(config.Fw.CategoriesSpecs) fmt.Println(stringCategoryToSymbolicPolicy(categoryToPolicy)) allowOnlyPolicy := computeAllowOnlyRulesForPolicy(config.Fw.CategoriesSpecs, categoryToPolicy) - _ = allowOnlyPolicy - return "", nil + return &allowOnlyPolicy, nil } diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index c35e4d0..0dba6bb 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -58,7 +58,9 @@ func TestPreprocessing(t *testing.T) { func (a *synthesisTest) runSynthesis(t *testing.T) { rc := data.ExamplesGeneration(a.exData) - NSXToAbstractModelSynthesis(rc) + allowOnlyPolicy, err := NSXToAbstractModelSynthesis(rc) + require.Nil(t, err) + fmt.Println(strAllowOnlyPolicy(allowOnlyPolicy)) } func TestSynthesis(t *testing.T) { From 34e5de377606336f8c697f211b0d68e6858a1db0 Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 25 Dec 2024 14:11:03 +0200 Subject: [PATCH 70/76] finalized initial tests --- pkg/synthesis/symbolicRule.go | 7 +++---- pkg/synthesis/synthesis_test.go | 13 +++++++++---- .../ExampleDumbeldore_ConvertToAbstract.txt | 8 ++++++++ ...dore.txt => ExampleDumbeldore_PreProcessing.txt} | 0 4 files changed, 20 insertions(+), 8 deletions(-) create mode 100644 pkg/synthesis/tests_expected_output/ExampleDumbeldore_ConvertToAbstract.txt rename pkg/synthesis/tests_expected_output/{ExampleDumbeldore.txt => ExampleDumbeldore_PreProcessing.txt} (100%) diff --git a/pkg/synthesis/symbolicRule.go b/pkg/synthesis/symbolicRule.go index 75f22da..cbfffd1 100644 --- a/pkg/synthesis/symbolicRule.go +++ b/pkg/synthesis/symbolicRule.go @@ -150,9 +150,8 @@ func computeAllowOnlyForCategory(inboundOrOutbound *[]*symbolicRule, } func strAllowOnlyPolicy(policy *symbolicPolicy) string { - return fmt.Sprintf("\ninbound allow only rules\n~~~~~~~~~~~~~~~~~~~~~~~~~\n") + - strAllowOnlyPathsOfRules(policy.inbound) + - fmt.Sprintf("\noutbound allow only rules\n~~~~~~~~~~~~~~~~~~~~~~~~~\n") + + return fmt.Sprintf("Allow Only 
Rules\n~~~~~~~~~~~~~~~~~\ninbound rules\n") + + strAllowOnlyPathsOfRules(policy.inbound) + fmt.Sprintf("\noutbound rules\n") + strAllowOnlyPathsOfRules(policy.outbound) } @@ -163,7 +162,7 @@ func strAllowOnlyPathsOfRules(rules []*symbolicRule) string { continue } for _, path := range rule.allowOnlyRulePaths { - res = append(res, path.String()) + res = append(res, "\t"+path.String()) } } return strings.Join(res, "\n") diff --git a/pkg/synthesis/synthesis_test.go b/pkg/synthesis/synthesis_test.go index 0dba6bb..3a94ee3 100644 --- a/pkg/synthesis/synthesis_test.go +++ b/pkg/synthesis/synthesis_test.go @@ -31,7 +31,6 @@ var allTests = []synthesisTest{ }, } -// todo: extract common code and use in runSynthesis func (a *synthesisTest) runPreprocessing(t *testing.T) { rc := data.ExamplesGeneration(a.exData) parser := model.NewNSXConfigParserFromResourcesContainer(rc) @@ -40,7 +39,7 @@ func (a *synthesisTest) runPreprocessing(t *testing.T) { config := parser.GetConfig() categoryToPolicy := preProcessing(config.Fw.CategoriesSpecs) fmt.Println(stringCategoryToSymbolicPolicy(categoryToPolicy)) - expectedOutputFileName := filepath.Join(getTestsDirOut(), a.name+".txt") + expectedOutputFileName := filepath.Join(getTestsDirOut(), a.name+"_PreProcessing.txt") expectedOutput, err2 := os.ReadFile(expectedOutputFileName) require.Nil(t, err2) expectedOutputStr := string(expectedOutput) @@ -56,18 +55,24 @@ func TestPreprocessing(t *testing.T) { } } -func (a *synthesisTest) runSynthesis(t *testing.T) { +func (a *synthesisTest) runConvertToAbstract(t *testing.T) { rc := data.ExamplesGeneration(a.exData) allowOnlyPolicy, err := NSXToAbstractModelSynthesis(rc) require.Nil(t, err) fmt.Println(strAllowOnlyPolicy(allowOnlyPolicy)) + expectedOutputFileName := filepath.Join(getTestsDirOut(), a.name+"_ConvertToAbstract.txt") + expectedOutput, err2 := os.ReadFile(expectedOutputFileName) + require.Nil(t, err2) + expectedOutputStr := string(expectedOutput) + require.Equal(t, cleanStr(strAllowOnlyPolicy(allowOnlyPolicy)), cleanStr(expectedOutputStr), + "output not as expected") } func TestSynthesis(t *testing.T) { logging.Init(logging.HighVerbosity) for i := range allTests { test := &allTests[i] - test.runSynthesis(t) + test.runConvertToAbstract(t) } } diff --git a/pkg/synthesis/tests_expected_output/ExampleDumbeldore_ConvertToAbstract.txt b/pkg/synthesis/tests_expected_output/ExampleDumbeldore_ConvertToAbstract.txt new file mode 100644 index 0000000..e132ce0 --- /dev/null +++ b/pkg/synthesis/tests_expected_output/ExampleDumbeldore_ConvertToAbstract.txt @@ -0,0 +1,8 @@ +Allow Only Rules +~~~~~~~~~~~~~~~~~ +inbound rules + All Connections from (group = DumbledoreAll) to (*) + All Connections from (group = DumbledoreNoSly) to (*) +outbound rules + All Connections from (group = DumbledoreAll) to (*) + All Connections from (group = DumbledoreNoSly) to (*) \ No newline at end of file diff --git a/pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt b/pkg/synthesis/tests_expected_output/ExampleDumbeldore_PreProcessing.txt similarity index 100% rename from pkg/synthesis/tests_expected_output/ExampleDumbeldore.txt rename to pkg/synthesis/tests_expected_output/ExampleDumbeldore_PreProcessing.txt From 2b1d102e3039b8286d6d86acc4631d332ce52167 Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 25 Dec 2024 14:16:16 +0200 Subject: [PATCH 71/76] lint --- pkg/synthesis/symbolicRule.go | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pkg/synthesis/symbolicRule.go b/pkg/synthesis/symbolicRule.go index 
cbfffd1..091da4a 100644 --- a/pkg/synthesis/symbolicRule.go +++ b/pkg/synthesis/symbolicRule.go @@ -104,7 +104,6 @@ func computeAllowOnlyInboundOrOutbound(originalRules []*symbolicRule, globalDeni newAllows, newDenies := computeAllowOnlyForCategory(&originalRules, globalDenies) *globalDenies = append(*globalDenies, *newDenies...) return newAllows - } // computes allow only rules, using the following algorithm: @@ -150,8 +149,8 @@ func computeAllowOnlyForCategory(inboundOrOutbound *[]*symbolicRule, } func strAllowOnlyPolicy(policy *symbolicPolicy) string { - return fmt.Sprintf("Allow Only Rules\n~~~~~~~~~~~~~~~~~\ninbound rules\n") + - strAllowOnlyPathsOfRules(policy.inbound) + fmt.Sprintf("\noutbound rules\n") + + return "Allow Only Rules\n~~~~~~~~~~~~~~~~~\ninbound rules\n" + + strAllowOnlyPathsOfRules(policy.inbound) + "\noutbound rules\n" + strAllowOnlyPathsOfRules(policy.outbound) } From e02a4bb52fa4e1ba619eacada679f17af5b27b78 Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 25 Dec 2024 14:23:26 +0200 Subject: [PATCH 72/76] restore file to ease CR --- pkg/synthesis/symbolicPolicy.go | 64 +++++++++++++++++++++++++++++++++ pkg/synthesis/symbolicRule.go | 58 ------------------------------ 2 files changed, 64 insertions(+), 58 deletions(-) create mode 100644 pkg/synthesis/symbolicPolicy.go diff --git a/pkg/synthesis/symbolicPolicy.go b/pkg/synthesis/symbolicPolicy.go new file mode 100644 index 0000000..a62dcd3 --- /dev/null +++ b/pkg/synthesis/symbolicPolicy.go @@ -0,0 +1,64 @@ +package synthesis + +import ( + "fmt" + "strings" + + "github.com/np-guard/vmware-analyzer/pkg/model/dfw" + "github.com/np-guard/vmware-analyzer/pkg/symbolicexpr" +) + +///////////////////////////////////////////////////////////////////////////////////// +// preprocessing related functionality +///////////////////////////////////////////////////////////////////////////////////// + +// preProcessing: convert policy from spec to symbolicPolicy struct +func preProcessing(categoriesSpecs []*dfw.CategorySpec) (categoryToPolicy map[dfw.DfwCategory]*symbolicPolicy) { + categoryToPolicy = map[dfw.DfwCategory]*symbolicPolicy{} + for _, category := range categoriesSpecs { + categoryPolicy := symbolicPolicy{} + if len(category.ProcessedRules.Outbound)+len(category.ProcessedRules.Inbound) == 0 { + continue + } + categoryPolicy.inbound = append(categoryPolicy.inbound, convertRulesToSymbolicPaths(category.ProcessedRules.Inbound, + category.Category)...) + categoryPolicy.outbound = append(categoryPolicy.outbound, convertRulesToSymbolicPaths(category.ProcessedRules.Outbound, + category.Category)...) + + categoryToPolicy[category.Category] = &categoryPolicy + } + return categoryToPolicy +} + +func convertRulesToSymbolicPaths(rules []*dfw.FwRule, category dfw.DfwCategory) []*symbolicRule { + res := make([]*symbolicRule, len(rules)) + for i, rule := range rules { + ruleSymbolicPaths := symbolicexpr.ConvertFWRuleToSymbolicPaths(rule) + res[i] = &symbolicRule{origRule: rule, origRuleCategory: category, origSymbolicPaths: ruleSymbolicPaths} + } + return res +} + +func (policy symbolicPolicy) string() string { + return fmt.Sprintf("symbolic inbound rules:\n%v\nsymbolic outbound rules:\n%v", strSymbolicRules(policy.inbound), + strSymbolicRules(policy.outbound)) +} + +func strSymbolicRules(rules []*symbolicRule) string { + resStr := make([]string, len(rules)) + for i, rule := range rules { + resStr[i] = fmt.Sprintf("\t%v. 
action: %v paths: %v", i, rule.origRule.Action, rule.origSymbolicPaths) + } + return strings.Join(resStr, "\n") +} + +func stringCategoryToSymbolicPolicy(categoryToPolicy map[dfw.DfwCategory]*symbolicPolicy) string { + res := []string{} + for category, policy := range categoryToPolicy { + if len(policy.inbound) > 0 || len(policy.outbound) > 0 { + res = append(res, fmt.Sprintf("category: %s\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n%v", + category.String(), policy.string())) + } + } + return strings.Join(res, "\n") +} diff --git a/pkg/synthesis/symbolicRule.go b/pkg/synthesis/symbolicRule.go index 091da4a..4b10c19 100644 --- a/pkg/synthesis/symbolicRule.go +++ b/pkg/synthesis/symbolicRule.go @@ -1,70 +1,12 @@ package synthesis import ( - "fmt" "strings" "github.com/np-guard/vmware-analyzer/pkg/model/dfw" "github.com/np-guard/vmware-analyzer/pkg/symbolicexpr" ) -///////////////////////////////////////////////////////////////////////////////////// -// preprocessing related functionality -///////////////////////////////////////////////////////////////////////////////////// - -// todo: one category may have few instances in the original NSX policy; need to chain these to one - -// preProcessing: convert policy from spec to symbolicPolicy struct -func preProcessing(categoriesSpecs []*dfw.CategorySpec) (categoryToPolicy map[dfw.DfwCategory]*symbolicPolicy) { - categoryToPolicy = map[dfw.DfwCategory]*symbolicPolicy{} - for _, category := range categoriesSpecs { - categoryPolicy := symbolicPolicy{} - if len(category.ProcessedRules.Outbound)+len(category.ProcessedRules.Inbound) == 0 { - continue - } - categoryPolicy.inbound = append(categoryPolicy.inbound, convertRulesToSymbolicPaths(category.ProcessedRules.Inbound, - category.Category)...) - categoryPolicy.outbound = append(categoryPolicy.outbound, convertRulesToSymbolicPaths(category.ProcessedRules.Outbound, - category.Category)...) - - categoryToPolicy[category.Category] = &categoryPolicy - } - return categoryToPolicy -} - -func convertRulesToSymbolicPaths(rules []*dfw.FwRule, category dfw.DfwCategory) []*symbolicRule { - res := make([]*symbolicRule, len(rules)) - for i, rule := range rules { - ruleSymbolicPaths := symbolicexpr.ConvertFWRuleToSymbolicPaths(rule) - res[i] = &symbolicRule{origRule: rule, origRuleCategory: category, origSymbolicPaths: ruleSymbolicPaths} - } - return res -} - -func (policy symbolicPolicy) string() string { - return fmt.Sprintf("symbolic inbound rules:\n%v\nsymbolic outbound rules:\n%v", strSymbolicRules(policy.inbound), - strSymbolicRules(policy.outbound)) -} - -func strSymbolicRules(rules []*symbolicRule) string { - resStr := make([]string, len(rules)) - for i, rule := range rules { - resStr[i] = fmt.Sprintf("\t%v. 
action: %v paths: %v", i, rule.origRule.Action, rule.origSymbolicPaths) - } - return strings.Join(resStr, "\n") -} - -func stringCategoryToSymbolicPolicy(categoryToPolicy map[dfw.DfwCategory]*symbolicPolicy) string { - res := []string{} - for category, policy := range categoryToPolicy { - if len(policy.inbound) > 0 || len(policy.outbound) > 0 { - res = append(res, fmt.Sprintf("category: %s\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n%v", - category.String(), policy.string())) - } - } - return strings.Join(res, "\n") -} - ///////////////////////////////////////////////////////////////////////////////////// // convert symbolic rules to allow only functionality ///////////////////////////////////////////////////////////////////////////////////// From c3f1ce917893c9d04f51874bbd03194019d8ec5c Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 25 Dec 2024 14:25:53 +0200 Subject: [PATCH 73/76] renaming --- pkg/synthesis/{symbolicRule.go => allowOnlyConversion.go} | 0 pkg/synthesis/{symbolicPolicy.go => preProcessing.go} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename pkg/synthesis/{symbolicRule.go => allowOnlyConversion.go} (100%) rename pkg/synthesis/{symbolicPolicy.go => preProcessing.go} (100%) diff --git a/pkg/synthesis/symbolicRule.go b/pkg/synthesis/allowOnlyConversion.go similarity index 100% rename from pkg/synthesis/symbolicRule.go rename to pkg/synthesis/allowOnlyConversion.go diff --git a/pkg/synthesis/symbolicPolicy.go b/pkg/synthesis/preProcessing.go similarity index 100% rename from pkg/synthesis/symbolicPolicy.go rename to pkg/synthesis/preProcessing.go From fcc67732a96e707b9a06c7316173596a906bb94a Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 25 Dec 2024 15:06:49 +0200 Subject: [PATCH 74/76] bug fix --- pkg/synthesis/allowOnlyConversion.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkg/synthesis/allowOnlyConversion.go b/pkg/synthesis/allowOnlyConversion.go index 4b10c19..10ea8ec 100644 --- a/pkg/synthesis/allowOnlyConversion.go +++ b/pkg/synthesis/allowOnlyConversion.go @@ -72,7 +72,6 @@ func computeAllowOnlyForCategory(inboundOrOutbound *[]*symbolicRule, newGlobalDenies := symbolicexpr.SymbolicPaths{} copy(newGlobalDenies, *globalDenies) for _, rule := range *inboundOrOutbound { - symbolicDeniesAndPasses := symbolicexpr.SymbolicPaths{} switch rule.origRule.Action { case dfw.ActionJumpToApp: categoryPasses = append(categoryPasses, *rule.origSymbolicPaths...) @@ -80,6 +79,8 @@ func computeAllowOnlyForCategory(inboundOrOutbound *[]*symbolicRule, newSymbolicPaths := symbolicexpr.ComputeAllowGivenDenies(rule.origSymbolicPaths, &categoryPasses) newGlobalDenies = append(newGlobalDenies, *newSymbolicPaths...) case dfw.ActionAllow: + symbolicDeniesAndPasses := symbolicexpr.SymbolicPaths{} + symbolicDeniesAndPasses = append(symbolicDeniesAndPasses, newGlobalDenies...) symbolicDeniesAndPasses = append(symbolicDeniesAndPasses, categoryPasses...) 
newSymbolicPaths := symbolicexpr.ComputeAllowGivenDenies(rule.origSymbolicPaths, &symbolicDeniesAndPasses) newRule := &symbolicRule{origRule: rule.origRule, origRuleCategory: rule.origRuleCategory, From 13f51aebe5c05274c0770f549e767ed0a9775af6 Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 25 Dec 2024 15:07:42 +0200 Subject: [PATCH 75/76] fix test expected output --- .../ExampleDumbeldore_ConvertToAbstract.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/synthesis/tests_expected_output/ExampleDumbeldore_ConvertToAbstract.txt b/pkg/synthesis/tests_expected_output/ExampleDumbeldore_ConvertToAbstract.txt index e132ce0..2f4d6b9 100644 --- a/pkg/synthesis/tests_expected_output/ExampleDumbeldore_ConvertToAbstract.txt +++ b/pkg/synthesis/tests_expected_output/ExampleDumbeldore_ConvertToAbstract.txt @@ -2,7 +2,7 @@ Allow Only Rules ~~~~~~~~~~~~~~~~~ inbound rules All Connections from (group = DumbledoreAll) to (*) - All Connections from (group = DumbledoreNoSly) to (*) + All Connections from (group = DumbledoreNoSly) to (group != Slytherin) outbound rules All Connections from (group = DumbledoreAll) to (*) - All Connections from (group = DumbledoreNoSly) to (*) \ No newline at end of file + All Connections from (group = DumbledoreNoSly) to (group != Slytherin) \ No newline at end of file From dd9350ca7fdfbf13c28ee434b43ee9d77cda7ad7 Mon Sep 17 00:00:00 2001 From: shirim Date: Wed, 25 Dec 2024 15:29:10 +0200 Subject: [PATCH 76/76] fix test expected output --- pkg/synthesis/allowOnlyConversion.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/pkg/synthesis/allowOnlyConversion.go b/pkg/synthesis/allowOnlyConversion.go index 10ea8ec..1b040d8 100644 --- a/pkg/synthesis/allowOnlyConversion.go +++ b/pkg/synthesis/allowOnlyConversion.go @@ -25,9 +25,7 @@ func computeAllowOnlyRulesForPolicy(categoriesSpecs []*dfw.CategorySpec, &globalInboundDenies, &globalOutboundDenies) allowOnlyPolicy.inbound = append(allowOnlyPolicy.inbound, inboundAllow...) allowOnlyPolicy.outbound = append(allowOnlyPolicy.outbound, outboundAllow...) - // todo: handle default rule } - return allowOnlyPolicy }
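
For reference, the allow-only conversion wired up across the patches above reduces to a small piece of control flow: walk the categories in order, let jump_to_app rules accumulate as category-local "passes", let deny rules (minus those passes) accumulate into the global denies, and rewrite each allow rule against everything of higher priority. The sketch below is illustrative only — it uses plain strings and a trivial set subtraction as stand-ins for symbolicexpr.SymbolicPaths and symbolicexpr.ComputeAllowGivenDenies, and none of its names beyond those mentioned in the diffs reflect the real API.

// allowonly_sketch.go
//
// Minimal, self-contained sketch of the allow-only conversion from
// pkg/synthesis/allowOnlyConversion.go. Paths are plain strings and the
// "allow given denies" step is a simple set subtraction; the real code
// subtracts symbolic expressions and may split a path into residual paths.
package main

import "fmt"

type action int

const (
	allow action = iota
	deny
	jumpToApp
)

type rule struct {
	act   action
	paths []string // stand-in for *symbolicexpr.SymbolicPaths
}

// allowGivenDenies stands in for symbolicexpr.ComputeAllowGivenDenies:
// keep only the allowed paths not covered by a higher-priority deny/pass.
func allowGivenDenies(allowed, denies []string) []string {
	denied := map[string]bool{}
	for _, d := range denies {
		denied[d] = true
	}
	res := []string{}
	for _, a := range allowed {
		if !denied[a] {
			res = append(res, a)
		}
	}
	return res
}

// computeAllowOnly mirrors computeAllowOnlyForCategory for one direction
// (inbound or outbound) of a single category: accumulate category passes
// and denies, and rewrite each allow rule relative to what precedes it.
func computeAllowOnly(rules []rule, globalDenies *[]string) [][]string {
	categoryPasses := []string{}
	newDenies := append([]string{}, *globalDenies...)
	allowOnly := [][]string{}
	for _, r := range rules {
		switch r.act {
		case jumpToApp:
			categoryPasses = append(categoryPasses, r.paths...)
		case deny:
			// deny paths are themselves reduced by this category's passes
			newDenies = append(newDenies, allowGivenDenies(r.paths, categoryPasses)...)
		case allow:
			// an allow is rewritten given all accumulated denies plus passes
			higher := append(append([]string{}, newDenies...), categoryPasses...)
			allowOnly = append(allowOnly, allowGivenDenies(r.paths, higher))
		}
	}
	*globalDenies = newDenies
	return allowOnly
}

func main() {
	// Two categories processed in order, as computeAllowOnlyRulesForPolicy does.
	environment := []rule{
		{deny, []string{"Slytherin->Hufflepuff"}},
	}
	application := []rule{
		{allow, []string{"Slytherin->Hufflepuff", "Gryffindor->Hufflepuff"}},
	}
	globalDenies := []string{}
	fmt.Println(computeAllowOnly(environment, &globalDenies)) // []
	fmt.Println(computeAllowOnly(application, &globalDenies)) // [[Gryffindor->Hufflepuff]]
}

Running the sketch shows the same effect as the ExampleDumbeldore_ConvertToAbstract.txt expectation above: an allow that overlaps a higher-priority deny keeps only its residual paths, so the environment-category deny of Slytherin traffic strips that path from the application-category allow.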