Unverified Commit dd430c31 authored by M. Mert Yıldıran, committed by GitHub

Always derive the summary and method fields from the entry in the database on read (#877)

* Always derive the summary and method fields from the entry in the database on read

* Update the expected JSONs in the protocol unit tests

* Add test cases for `Summarize` method

* Remove unused `GetEntry` method, `DataUnmarshaler` struct and `UnmarshalData` method

* Temporarily enable the acceptance tests

* Temporarily disable Slack notification on failure

* Update the Cypress tests

* Fix an issue in Redis

* Fix a typo and the Cypress tests

* Revert "Temporarily disable Slack notification on failure"

This reverts commit cad1901e.

* Revert "Temporarily enable the acceptance tests"

This reverts commit bad7706c.
parent c1d774e5
Showing with 411 additions and 171 deletions
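At a high level, this change stops persisting the summary/method fields on the `Entry` at write time; instead every protocol dissector now implements `Summarize(entry *Entry) *BaseEntry`, and the read paths (`GetEntries`, `GetEntry`, the WebSocket streamer) look up the entry's protocol extension and derive the `BaseEntry` on the fly. Below is a minimal sketch of that read path, assembled from the hunks in this commit for orientation only; `summarizeOnRead` is a hypothetical helper name, not part of the change.

package main

import (
	"fmt"

	tapApi "github.com/up9inc/mizu/tap/api"
)

// Populated once at startup, mirroring the InitExtensionsMap calls added below.
var extensionsMap map[string]*tapApi.Extension

// summarizeOnRead derives the lightweight BaseEntry (summary, method, status
// and the corresponding filter queries) from a full Entry fetched from the
// database, instead of reading fields that used to be stored at write time.
func summarizeOnRead(entry *tapApi.Entry) (*tapApi.BaseEntry, error) {
	extension, ok := extensionsMap[entry.Protocol.Name]
	if !ok {
		return nil, fmt.Errorf("no extension registered for protocol %q", entry.Protocol.Name)
	}
	return extension.Dissector.Summarize(entry), nil
}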
@@ -65,14 +65,14 @@ export function checkThatAllEntriesShown() {
}
export function checkFilterByMethod(funcDict) {
const {protocol, method, summary, hugeMizu} = funcDict;
const summaryDict = getSummeryDict(summary);
const methodDict = getMethodDict(method);
const {protocol, method, methodQuery, summary, summaryQuery} = funcDict;
const summaryDict = getSummaryDict(summary, summaryQuery);
const methodDict = getMethodDict(method, methodQuery);
const protocolDict = getProtocolDict(protocol.name, protocol.text);
it(`Testing the method: ${method}`, function () {
// applying filter
cy.get('.w-tc-editor-text').clear().type(`method == "${method}"`);
cy.get('.w-tc-editor-text').clear().type(methodQuery);
cy.get('[type="submit"]').click();
cy.get('.w-tc-editor').should('have.attr', 'style').and('include', Cypress.env('greenFilterColor'));
@@ -121,7 +121,7 @@ function resizeIfNeeded(entriesLen) {
function deepCheck(generalDict, protocolDict, methodDict, entry) {
const entryNum = getEntryNumById(entry.id);
const {summary, value} = generalDict;
const summaryDict = getSummeryDict(summary);
const summaryDict = getSummaryDict(summary);
leftOnHoverCheck(entryNum, methodDict.pathLeft, methodDict.expectedOnHover);
leftOnHoverCheck(entryNum, protocolDict.pathLeft, protocolDict.expectedOnHover);
@@ -149,13 +149,13 @@ function deepCheck(generalDict, protocolDict, methodDict, entry) {
}
}
function getSummeryDict(summary) {
if (summary) {
function getSummaryDict(value, query) {
if (value) {
return {
pathLeft: '> :nth-child(2) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
pathRight: '> :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
expectedText: summary,
expectedOnHover: `summary == "${summary}"`
expectedText: value,
expectedOnHover: query
};
}
else {
@@ -163,12 +163,12 @@ function getSummeryDict(summary) {
}
}
function getMethodDict(method) {
function getMethodDict(value, query) {
return {
pathLeft: '> :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
pathRight: '> :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
expectedText: method,
expectedOnHover: `method == "${method}"`
expectedText: value,
expectedOnHover: query
};
}
@@ -9,41 +9,53 @@ const rabbitProtocolDetails = {name: 'AMQP', text: 'Advanced Message Queuing Pro
checkFilterByMethod({
protocol: rabbitProtocolDetails,
method: 'exchange declare',
methodQuery: 'request.method == "exchange declare"',
summary: 'exchange',
summaryQuery: 'request.exchange == "exchange"',
value: null
});
checkFilterByMethod({
protocol: rabbitProtocolDetails,
method: 'queue declare',
methodQuery: 'request.method == "queue declare"',
summary: 'queue',
summaryQuery: 'request.queue == "queue"',
value: null
});
checkFilterByMethod({
protocol: rabbitProtocolDetails,
method: 'queue bind',
methodQuery: 'request.method == "queue bind"',
summary: 'queue',
summaryQuery: 'request.queue == "queue"',
value: null
});
checkFilterByMethod({
protocol: rabbitProtocolDetails,
method: 'basic publish',
methodQuery: 'request.method == "basic publish"',
summary: 'exchange',
summaryQuery: 'request.exchange == "exchange"',
value: {tab: valueTabs.request, regex: /^message$/mg}
});
checkFilterByMethod({
protocol: rabbitProtocolDetails,
method: 'basic consume',
methodQuery: 'request.method == "basic consume"',
summary: 'queue',
summaryQuery: 'request.queue == "queue"',
value: null
});
checkFilterByMethod({
protocol: rabbitProtocolDetails,
method: 'basic deliver',
methodQuery: 'request.method == "basic deliver"',
summary: 'exchange',
summaryQuery: 'request.queue == "exchange"',
value: {tab: valueTabs.request, regex: /^message$/mg}
});
@@ -9,34 +9,44 @@ const redisProtocolDetails = {name: 'redis', text: 'Redis Serialization Protocol
checkFilterByMethod({
protocol: redisProtocolDetails,
method: 'PING',
methodQuery: 'request.command == "PING"',
summary: null,
summaryQuery: '',
value: null
})
checkFilterByMethod({
protocol: redisProtocolDetails,
method: 'SET',
methodQuery: 'request.command == "SET"',
summary: 'key',
summaryQuery: 'request.key == "key"',
value: {tab: valueTabs.request, regex: /^\[value, keepttl]$/mg}
})
checkFilterByMethod({
protocol: redisProtocolDetails,
method: 'EXISTS',
methodQuery: 'request.command == "EXISTS"',
summary: 'key',
summaryQuery: 'request.key == "key"',
value: {tab: valueTabs.response, regex: /^1$/mg}
})
checkFilterByMethod({
protocol: redisProtocolDetails,
method: 'GET',
methodQuery: 'request.command == "GET"',
summary: 'key',
summaryQuery: 'request.key == "key"',
value: {tab: valueTabs.response, regex: /^value$/mg}
})
checkFilterByMethod({
protocol: redisProtocolDetails,
method: 'DEL',
methodQuery: 'request.command == "DEL"',
summary: 'key',
summaryQuery: 'request.key == "key"',
value: {tab: valueTabs.response, regex: /^1$|^0$/mg}
})
@@ -113,7 +113,7 @@ if (Cypress.env('shouldCheckSrcAndDest')) {
}
checkFilter({
name: 'method == "GET"',
name: 'request.method == "GET"',
leftSidePath: '> :nth-child(3) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
leftSideExpectedText: 'GET',
rightSidePath: '> :nth-child(2) > :nth-child(2) > :nth-child(1) > :nth-child(1) > :nth-child(2)',
@@ -122,7 +122,7 @@ checkFilter({
});
checkFilter({
name: 'summary == "/get"',
name: 'request.path == "/get"',
leftSidePath: '> :nth-child(3) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
leftSideExpectedText: '/get',
rightSidePath: '> :nth-child(2) > :nth-child(2) > :nth-child(1) > :nth-child(2) > :nth-child(2)',
@@ -139,7 +139,7 @@ checkFilter({
applyByEnter: false
});
checkFilterNoResults('method == "POST"');
checkFilterNoResults('request.method == "POST"');
function checkFilterNoResults(filterName) {
it(`checking the filter: ${filterName}. Expecting no results`, function () {
@@ -17,6 +17,12 @@ import (
tapApi "github.com/up9inc/mizu/tap/api"
)
var extensionsMap map[string]*tapApi.Extension // global
func InitExtensionsMap(ref map[string]*tapApi.Extension) {
extensionsMap = ref
}
type EventHandlers interface {
WebSocketConnect(socketId int, isTapper bool)
WebSocketDisconnect(socketId int, isTapper bool)
@@ -165,7 +171,8 @@ func websocketHandler(w http.ResponseWriter, r *http.Request, eventHandlers Even
if params.EnableFullEntries {
message, _ = models.CreateFullEntryWebSocketMessage(entry)
} else {
base := tapApi.Summarize(entry)
extension := extensionsMap[entry.Protocol.Name]
base := extension.Dissector.Summarize(entry)
message, _ = models.CreateBaseEntryWebSocketMessage(base)
}
@@ -60,6 +60,7 @@ func LoadExtensions() {
})
controllers.InitExtensionsMap(ExtensionsMap)
api.InitExtensionsMap(ExtensionsMap)
}
func ConfigureBasenineServer(host string, port string, dbSize int64, logLevel logging.Level, insertionFilter string) {
@@ -77,7 +77,8 @@ func GetEntries(c *gin.Context) {
return // exit
}
base := tapApi.Summarize(entry)
extension := extensionsMap[entry.Protocol.Name]
base := extension.Dissector.Summarize(entry)
dataSlice = append(dataSlice, base)
}
@@ -123,6 +124,7 @@ func GetEntry(c *gin.Context) {
}
extension := extensionsMap[entry.Protocol.Name]
base := extension.Dissector.Summarize(entry)
representation, bodySize, _ := extension.Dissector.Represent(entry.Request, entry.Response)
var rules []map[string]interface{}
@@ -142,6 +144,7 @@ func GetEntry(c *gin.Context) {
Representation: string(representation),
BodySize: bodySize,
Data: entry,
Base: base,
Rules: rules,
IsRulesEnabled: isRulesEnabled,
})
@@ -80,11 +80,7 @@ type httpEntry struct {
CreatedAt time.Time `json:"createdAt"`
Request map[string]interface{} `json:"request"`
Response map[string]interface{} `json:"response"`
Summary string `json:"summary"`
Method string `json:"method"`
Status int `json:"status"`
ElapsedTime int64 `json:"elapsedTime"`
Path string `json:"path"`
}
func (client *client) PushEntry(entry *api.Entry) {
@@ -103,11 +99,7 @@ func (client *client) PushEntry(entry *api.Entry) {
CreatedAt: entry.StartTime,
Request: entry.Request,
Response: entry.Response,
Summary: entry.Summary,
Method: entry.Method,
Status: entry.Status,
ElapsedTime: entry.ElapsedTime,
Path: entry.Path,
}
entryJson, err := json.Marshal(entryToPush)
@@ -12,10 +12,6 @@ import (
"github.com/up9inc/mizu/tap"
)
func GetEntry(r *tapApi.Entry, v tapApi.DataUnmarshaler) error {
return v.UnmarshalData(r)
}
type TapConfig struct {
TappedNamespaces map[string]bool `json:"tappedNamespaces"`
}
@@ -100,6 +100,7 @@ type Dissector interface {
Ping()
Dissect(b *bufio.Reader, isClient bool, tcpID *TcpID, counterPair *CounterPair, superTimer *SuperTimer, superIdentifier *SuperIdentifier, emitter Emitter, options *TrafficFilteringOptions, reqResMatcher RequestResponseMatcher) error
Analyze(item *OutputChannelItem, resolvedSource string, resolvedDestination string, namespace string) *Entry
Summarize(entry *Entry) *BaseEntry
Represent(request map[string]interface{}, response map[string]interface{}) (object []byte, bodySize int64, err error)
Macros() map[string]string
NewResponseRequestMatcher() RequestResponseMatcher
@@ -135,12 +136,7 @@ type Entry struct {
StartTime time.Time `json:"startTime"`
Request map[string]interface{} `json:"request"`
Response map[string]interface{} `json:"response"`
Summary string `json:"summary"`
Method string `json:"method"`
Status int `json:"status"`
ElapsedTime int64 `json:"elapsedTime"`
Path string `json:"path"`
IsOutgoing bool `json:"isOutgoing,omitempty"`
Rules ApplicableRules `json:"rules,omitempty"`
ContractStatus ContractStatus `json:"contractStatus,omitempty"`
ContractRequestReason string `json:"contractRequestReason,omitempty"`
@@ -154,6 +150,7 @@ type EntryWrapper struct {
Representation string `json:"representation"`
BodySize int64 `json:"bodySize"`
Data *Entry `json:"data"`
Base *BaseEntry `json:"base"`
Rules []map[string]interface{} `json:"rulesMatched,omitempty"`
IsRulesEnabled bool `json:"isRulesEnabled"`
}
@@ -161,11 +158,12 @@ type EntryWrapper struct {
type BaseEntry struct {
Id uint `json:"id"`
Protocol Protocol `json:"proto,omitempty"`
Url string `json:"url,omitempty"`
Path string `json:"path,omitempty"`
Summary string `json:"summary,omitempty"`
StatusCode int `json:"status"`
SummaryQuery string `json:"summaryQuery,omitempty"`
Status int `json:"status"`
StatusQuery string `json:"statusQuery"`
Method string `json:"method,omitempty"`
MethodQuery string `json:"methodQuery,omitempty"`
Timestamp int64 `json:"timestamp,omitempty"`
Source *TCP `json:"src"`
Destination *TCP `json:"dst"`
@@ -190,44 +188,6 @@ type Contract struct {
Content string `json:"content"`
}
func Summarize(entry *Entry) *BaseEntry {
return &BaseEntry{
Id: entry.Id,
Protocol: entry.Protocol,
Path: entry.Path,
Summary: entry.Summary,
StatusCode: entry.Status,
Method: entry.Method,
Timestamp: entry.Timestamp,
Source: entry.Source,
Destination: entry.Destination,
IsOutgoing: entry.IsOutgoing,
Latency: entry.ElapsedTime,
Rules: entry.Rules,
ContractStatus: entry.ContractStatus,
}
}
type DataUnmarshaler interface {
UnmarshalData(*Entry) error
}
func (bed *BaseEntry) UnmarshalData(entry *Entry) error {
bed.Protocol = entry.Protocol
bed.Id = entry.Id
bed.Path = entry.Path
bed.Summary = entry.Summary
bed.StatusCode = entry.Status
bed.Method = entry.Method
bed.Timestamp = entry.Timestamp
bed.Source = entry.Source
bed.Destination = entry.Destination
bed.IsOutgoing = entry.IsOutgoing
bed.Latency = entry.ElapsedTime
bed.ContractStatus = entry.ContractStatus
return nil
}
const (
TABLE string = "table"
BODY string = "body"
@@ -13,4 +13,4 @@ test-pull-bin:
test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect/amqp/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect3/amqp/\* expect
@@ -219,31 +219,6 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
request := item.Pair.Request.Payload.(map[string]interface{})
reqDetails := request["details"].(map[string]interface{})
summary := ""
switch request["method"] {
case basicMethodMap[40]:
summary = reqDetails["exchange"].(string)
case basicMethodMap[60]:
summary = reqDetails["exchange"].(string)
case exchangeMethodMap[10]:
summary = reqDetails["exchange"].(string)
case queueMethodMap[10]:
summary = reqDetails["queue"].(string)
case connectionMethodMap[10]:
summary = fmt.Sprintf(
"%s.%s",
strconv.Itoa(int(reqDetails["versionMajor"].(float64))),
strconv.Itoa(int(reqDetails["versionMinor"].(float64))),
)
case connectionMethodMap[50]:
summary = reqDetails["replyText"].(string)
case queueMethodMap[20]:
summary = reqDetails["queue"].(string)
case basicMethodMap[20]:
summary = reqDetails["queue"].(string)
}
request["url"] = summary
reqDetails["method"] = request["method"]
return &api.Entry{
Protocol: protocol,
@@ -260,17 +235,70 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
Namespace: namespace,
Outgoing: item.ConnectionInfo.IsOutgoing,
Request: reqDetails,
Method: request["method"].(string),
Status: 0,
Timestamp: item.Timestamp,
StartTime: item.Pair.Request.CaptureTime,
ElapsedTime: 0,
Summary: summary,
IsOutgoing: item.ConnectionInfo.IsOutgoing,
}
}
func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
summary := ""
summaryQuery := ""
method := entry.Request["method"].(string)
methodQuery := fmt.Sprintf(`request.method == "%s"`, method)
switch method {
case basicMethodMap[40]:
summary = entry.Request["exchange"].(string)
summaryQuery = fmt.Sprintf(`request.exchange == "%s"`, summary)
case basicMethodMap[60]:
summary = entry.Request["exchange"].(string)
summaryQuery = fmt.Sprintf(`request.exchange == "%s"`, summary)
case exchangeMethodMap[10]:
summary = entry.Request["exchange"].(string)
summaryQuery = fmt.Sprintf(`request.exchange == "%s"`, summary)
case queueMethodMap[10]:
summary = entry.Request["queue"].(string)
summaryQuery = fmt.Sprintf(`request.queue == "%s"`, summary)
case connectionMethodMap[10]:
versionMajor := int(entry.Request["versionMajor"].(float64))
versionMinor := int(entry.Request["versionMinor"].(float64))
summary = fmt.Sprintf(
"%s.%s",
strconv.Itoa(versionMajor),
strconv.Itoa(versionMinor),
)
summaryQuery = fmt.Sprintf(`request.versionMajor == %d and request.versionMinor == %d`, versionMajor, versionMinor)
case connectionMethodMap[50]:
summary = entry.Request["replyText"].(string)
summaryQuery = fmt.Sprintf(`request.replyText == "%s"`, summary)
case queueMethodMap[20]:
summary = entry.Request["queue"].(string)
summaryQuery = fmt.Sprintf(`request.queue == "%s"`, summary)
case basicMethodMap[20]:
summary = entry.Request["queue"].(string)
summaryQuery = fmt.Sprintf(`request.queue == "%s"`, summary)
}
return &api.BaseEntry{
Id: entry.Id,
Protocol: entry.Protocol,
Summary: summary,
SummaryQuery: summaryQuery,
Status: 0,
StatusQuery: "",
Method: method,
MethodQuery: methodQuery,
Timestamp: entry.Timestamp,
Source: entry.Source,
Destination: entry.Destination,
IsOutgoing: entry.Outgoing,
Latency: entry.ElapsedTime,
Rules: entry.Rules,
ContractStatus: entry.ContractStatus,
}
}
func (d dissecting) Represent(request map[string]interface{}, response map[string]interface{}) (object []byte, bodySize int64, err error) {
bodySize = 0
representation := make(map[string]interface{})
@@ -21,14 +21,16 @@ import (
const (
binDir = "bin"
patternBin = "*_req.bin"
patternDissect = "*.json"
patternExpect = "*.json"
msgDissecting = "Dissecting:"
msgAnalyzing = "Analyzing:"
msgSummarizing = "Summarizing:"
msgRepresenting = "Representing:"
respSuffix = "_res.bin"
expectDir = "expect"
dissectDir = "dissect"
analyzeDir = "analyze"
summarizeDir = "summarize"
representDir = "represent"
testUpdate = "TEST_UPDATE"
)
@@ -186,7 +188,7 @@ func TestAnalyze(t *testing.T) {
}
dissector := NewDissector()
paths, err := filepath.Glob(path.Join(expectDirDissect, patternDissect))
paths, err := filepath.Glob(path.Join(expectDirDissect, patternExpect))
if err != nil {
log.Fatal(err)
}
@@ -230,6 +232,63 @@ func TestAnalyze(t *testing.T) {
}
}
func TestSummarize(t *testing.T) {
_, testUpdateEnabled := os.LookupEnv(testUpdate)
expectDirAnalyze := path.Join(expectDir, analyzeDir)
expectDirSummarize := path.Join(expectDir, summarizeDir)
if testUpdateEnabled {
os.RemoveAll(expectDirSummarize)
err := os.MkdirAll(expectDirSummarize, 0775)
assert.Nil(t, err)
}
dissector := NewDissector()
paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
if err != nil {
log.Fatal(err)
}
for _, _path := range paths {
fmt.Printf("%s %s\n", msgSummarizing, _path)
bytes, err := ioutil.ReadFile(_path)
assert.Nil(t, err)
var entries []*api.Entry
err = json.Unmarshal(bytes, &entries)
assert.Nil(t, err)
var baseEntries []*api.BaseEntry
for _, entry := range entries {
baseEntry := dissector.Summarize(entry)
baseEntries = append(baseEntries, baseEntry)
}
pathExpect := path.Join(expectDirSummarize, filepath.Base(_path))
marshaled, err := json.Marshal(baseEntries)
assert.Nil(t, err)
if testUpdateEnabled {
if len(baseEntries) > 0 {
err = os.WriteFile(pathExpect, marshaled, 0644)
assert.Nil(t, err)
}
} else {
if _, err := os.Stat(pathExpect); errors.Is(err, os.ErrNotExist) {
assert.Len(t, entries, 0)
} else {
expectedBytes, err := ioutil.ReadFile(pathExpect)
assert.Nil(t, err)
assert.JSONEq(t, string(expectedBytes), string(marshaled))
}
}
}
}
func TestRepresent(t *testing.T) {
_, testUpdateEnabled := os.LookupEnv(testUpdate)
@@ -243,7 +302,7 @@ func TestRepresent(t *testing.T) {
}
dissector := NewDissector()
paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternDissect))
paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
if err != nil {
log.Fatal(err)
}
@@ -13,4 +13,4 @@ test-pull-bin:
test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect2/http/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect3/http/\* expect
@@ -231,7 +231,6 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
reqDetails["targetUri"] = reqDetails["url"]
reqDetails["path"] = path
reqDetails["pathSegments"] = strings.Split(path, "/")[1:]
reqDetails["summary"] = path
// Rearrange the maps for the querying
reqDetails["_headers"] = reqDetails["headers"]
@@ -248,17 +247,11 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
reqDetails["_queryStringMerged"] = mapSliceMergeRepeatedKeys(reqDetails["_queryString"].([]interface{}))
reqDetails["queryString"] = mapSliceRebuildAsMap(reqDetails["_queryStringMerged"].([]interface{}))
method := reqDetails["method"].(string)
statusCode := int(resDetails["status"].(float64))
if item.Protocol.Abbreviation == "gRPC" {
resDetails["statusText"] = grpcStatusCodes[statusCode]
}
if item.Protocol.Version == "2.0" && !isRequestUpgradedH2C {
reqDetails["url"] = path
request["url"] = path
}
elapsedTime := item.Pair.Response.CaptureTime.Sub(item.Pair.Request.CaptureTime).Round(time.Millisecond).Milliseconds()
if elapsedTime < 0 {
elapsedTime = 0
@@ -280,17 +273,40 @@ func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string,
Outgoing: item.ConnectionInfo.IsOutgoing,
Request: reqDetails,
Response: resDetails,
Method: method,
Status: statusCode,
Timestamp: item.Timestamp,
StartTime: item.Pair.Request.CaptureTime,
ElapsedTime: elapsedTime,
Summary: path,
IsOutgoing: item.ConnectionInfo.IsOutgoing,
HTTPPair: string(httpPair),
}
}
func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
summary := entry.Request["path"].(string)
summaryQuery := fmt.Sprintf(`request.path == "%s"`, summary)
method := entry.Request["method"].(string)
methodQuery := fmt.Sprintf(`request.method == "%s"`, method)
status := int(entry.Response["status"].(float64))
statusQuery := fmt.Sprintf(`response.status == %d`, status)
return &api.BaseEntry{
Id: entry.Id,
Protocol: entry.Protocol,
Summary: summary,
SummaryQuery: summaryQuery,
Status: status,
StatusQuery: statusQuery,
Method: method,
MethodQuery: methodQuery,
Timestamp: entry.Timestamp,
Source: entry.Source,
Destination: entry.Destination,
IsOutgoing: entry.Outgoing,
Latency: entry.ElapsedTime,
Rules: entry.Rules,
ContractStatus: entry.ContractStatus,
}
}
func representRequest(request map[string]interface{}) (repRequest []interface{}) {
details, _ := json.Marshal([]api.TableData{
{
@@ -21,14 +21,16 @@ import (
const (
binDir = "bin"
patternBin = "*_req.bin"
patternDissect = "*.json"
patternExpect = "*.json"
msgDissecting = "Dissecting:"
msgAnalyzing = "Analyzing:"
msgSummarizing = "Summarizing:"
msgRepresenting = "Representing:"
respSuffix = "_res.bin"
expectDir = "expect"
dissectDir = "dissect"
analyzeDir = "analyze"
summarizeDir = "summarize"
representDir = "represent"
testUpdate = "TEST_UPDATE"
)
@@ -188,7 +190,7 @@ func TestAnalyze(t *testing.T) {
}
dissector := NewDissector()
paths, err := filepath.Glob(path.Join(expectDirDissect, patternDissect))
paths, err := filepath.Glob(path.Join(expectDirDissect, patternExpect))
if err != nil {
log.Fatal(err)
}
@@ -232,6 +234,63 @@ func TestAnalyze(t *testing.T) {
}
}
func TestSummarize(t *testing.T) {
_, testUpdateEnabled := os.LookupEnv(testUpdate)
expectDirAnalyze := path.Join(expectDir, analyzeDir)
expectDirSummarize := path.Join(expectDir, summarizeDir)
if testUpdateEnabled {
os.RemoveAll(expectDirSummarize)
err := os.MkdirAll(expectDirSummarize, 0775)
assert.Nil(t, err)
}
dissector := NewDissector()
paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
if err != nil {
log.Fatal(err)
}
for _, _path := range paths {
fmt.Printf("%s %s\n", msgSummarizing, _path)
bytes, err := ioutil.ReadFile(_path)
assert.Nil(t, err)
var entries []*api.Entry
err = json.Unmarshal(bytes, &entries)
assert.Nil(t, err)
var baseEntries []*api.BaseEntry
for _, entry := range entries {
baseEntry := dissector.Summarize(entry)
baseEntries = append(baseEntries, baseEntry)
}
pathExpect := path.Join(expectDirSummarize, filepath.Base(_path))
marshaled, err := json.Marshal(baseEntries)
assert.Nil(t, err)
if testUpdateEnabled {
if len(baseEntries) > 0 {
err = os.WriteFile(pathExpect, marshaled, 0644)
assert.Nil(t, err)
}
} else {
if _, err := os.Stat(pathExpect); errors.Is(err, os.ErrNotExist) {
assert.Len(t, entries, 0)
} else {
expectedBytes, err := ioutil.ReadFile(pathExpect)
assert.Nil(t, err)
assert.JSONEq(t, string(expectedBytes), string(marshaled))
}
}
}
}
func TestRepresent(t *testing.T) {
_, testUpdateEnabled := os.LookupEnv(testUpdate)
@@ -245,7 +304,7 @@ func TestRepresent(t *testing.T) {
}
dissector := NewDissector()
paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternDissect))
paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
if err != nil {
log.Fatal(err)
}
@@ -13,4 +13,4 @@ test-pull-bin:
test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect/kafka/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect3/kafka/\* expect
@@ -61,110 +61,148 @@ func (d dissecting) Dissect(b *bufio.Reader, isClient bool, tcpID *api.TcpID, co
func (d dissecting) Analyze(item *api.OutputChannelItem, resolvedSource string, resolvedDestination string, namespace string) *api.Entry {
request := item.Pair.Request.Payload.(map[string]interface{})
reqDetails := request["details"].(map[string]interface{})
apiKey := ApiKey(reqDetails["apiKey"].(float64))
elapsedTime := item.Pair.Response.CaptureTime.Sub(item.Pair.Request.CaptureTime).Round(time.Millisecond).Milliseconds()
if elapsedTime < 0 {
elapsedTime = 0
}
return &api.Entry{
Protocol: _protocol,
Source: &api.TCP{
Name: resolvedSource,
IP: item.ConnectionInfo.ClientIP,
Port: item.ConnectionInfo.ClientPort,
},
Destination: &api.TCP{
Name: resolvedDestination,
IP: item.ConnectionInfo.ServerIP,
Port: item.ConnectionInfo.ServerPort,
},
Namespace: namespace,
Outgoing: item.ConnectionInfo.IsOutgoing,
Request: reqDetails,
Response: item.Pair.Response.Payload.(map[string]interface{})["details"].(map[string]interface{}),
Timestamp: item.Timestamp,
StartTime: item.Pair.Request.CaptureTime,
ElapsedTime: elapsedTime,
}
}
func (d dissecting) Summarize(entry *api.Entry) *api.BaseEntry {
status := 0
statusQuery := ""
apiKey := ApiKey(entry.Request["apiKey"].(float64))
method := apiNames[apiKey]
methodQuery := fmt.Sprintf("request.apiKey == %d", int(entry.Request["apiKey"].(float64)))
summary := ""
summaryQuery := ""
switch apiKey {
case Metadata:
_topics := reqDetails["payload"].(map[string]interface{})["topics"]
_topics := entry.Request["payload"].(map[string]interface{})["topics"]
if _topics == nil {
break
}
topics := _topics.([]interface{})
for _, topic := range topics {
for i, topic := range topics {
summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
summaryQuery += fmt.Sprintf(`request.payload.topics[%d].name == "%s" and`, i, summary)
}
if len(summary) > 0 {
summary = summary[:len(summary)-2]
summaryQuery = summaryQuery[:len(summaryQuery)-4]
}
case ApiVersions:
summary = reqDetails["clientID"].(string)
summary = entry.Request["clientID"].(string)
summaryQuery = fmt.Sprintf(`request.clientID == "%s"`, summary)
case Produce:
_topics := reqDetails["payload"].(map[string]interface{})["topicData"]
_topics := entry.Request["payload"].(map[string]interface{})["topicData"]
if _topics == nil {
break
}
topics := _topics.([]interface{})
for _, topic := range topics {
for i, topic := range topics {
summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["topic"].(string))
summaryQuery += fmt.Sprintf(`request.payload.topicData[%d].topic == "%s" and`, i, summary)
}
if len(summary) > 0 {
summary = summary[:len(summary)-2]
summaryQuery = summaryQuery[:len(summaryQuery)-4]
}
case Fetch:
_topics := reqDetails["payload"].(map[string]interface{})["topics"]
_topics := entry.Request["payload"].(map[string]interface{})["topics"]
if _topics == nil {
break
}
topics := _topics.([]interface{})
for _, topic := range topics {
for i, topic := range topics {
summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["topic"].(string))
summaryQuery += fmt.Sprintf(`request.payload.topics[%d].topic == "%s" and`, i, summary)
}
if len(summary) > 0 {
summary = summary[:len(summary)-2]
summaryQuery = summaryQuery[:len(summaryQuery)-4]
}
case ListOffsets:
_topics := reqDetails["payload"].(map[string]interface{})["topics"]
_topics := entry.Request["payload"].(map[string]interface{})["topics"]
if _topics == nil {
break
}
topics := _topics.([]interface{})
for _, topic := range topics {
for i, topic := range topics {
summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
summaryQuery += fmt.Sprintf(`request.payload.topics[%d].name == "%s" and`, i, summary)
}
if len(summary) > 0 {
summary = summary[:len(summary)-2]
summaryQuery = summaryQuery[:len(summaryQuery)-4]
}
case CreateTopics:
_topics := reqDetails["payload"].(map[string]interface{})["topics"]
_topics := entry.Request["payload"].(map[string]interface{})["topics"]
if _topics == nil {
break
}
topics := _topics.([]interface{})
for _, topic := range topics {
for i, topic := range topics {
summary += fmt.Sprintf("%s, ", topic.(map[string]interface{})["name"].(string))
summaryQuery += fmt.Sprintf(`request.payload.topics[%d].name == "%s" and`, i, summary)
}
if len(summary) > 0 {
summary = summary[:len(summary)-2]
summaryQuery = summaryQuery[:len(summaryQuery)-4]
}
case DeleteTopics:
if reqDetails["topicNames"] == nil {
if entry.Request["topicNames"] == nil {
break
}
topicNames := reqDetails["topicNames"].([]string)
for _, name := range topicNames {
topicNames := entry.Request["topicNames"].([]string)
for i, name := range topicNames {
summary += fmt.Sprintf("%s, ", name)
summaryQuery += fmt.Sprintf(`request.topicNames[%d] == "%s" and`, i, summary)
}
if len(summary) > 0 {
summary = summary[:len(summary)-2]
summaryQuery = summaryQuery[:len(summaryQuery)-4]
}
}
request["url"] = summary
elapsedTime := item.Pair.Response.CaptureTime.Sub(item.Pair.Request.CaptureTime).Round(time.Millisecond).Milliseconds()
if elapsedTime < 0 {
elapsedTime = 0
}
return &api.Entry{
Protocol: _protocol,
Source: &api.TCP{
Name: resolvedSource,
IP: item.ConnectionInfo.ClientIP,
Port: item.ConnectionInfo.ClientPort,
},
Destination: &api.TCP{
Name: resolvedDestination,
IP: item.ConnectionInfo.ServerIP,
Port: item.ConnectionInfo.ServerPort,
},
Namespace: namespace,
Outgoing: item.ConnectionInfo.IsOutgoing,
Request: reqDetails,
Response: item.Pair.Response.Payload.(map[string]interface{})["details"].(map[string]interface{}),
Method: apiNames[apiKey],
Status: 0,
Timestamp: item.Timestamp,
StartTime: item.Pair.Request.CaptureTime,
ElapsedTime: elapsedTime,
Summary: summary,
IsOutgoing: item.ConnectionInfo.IsOutgoing,
return &api.BaseEntry{
Id: entry.Id,
Protocol: entry.Protocol,
Summary: summary,
SummaryQuery: summaryQuery,
Status: status,
StatusQuery: statusQuery,
Method: method,
MethodQuery: methodQuery,
Timestamp: entry.Timestamp,
Source: entry.Source,
Destination: entry.Destination,
IsOutgoing: entry.Outgoing,
Latency: entry.ElapsedTime,
Rules: entry.Rules,
ContractStatus: entry.ContractStatus,
}
}
@@ -21,14 +21,16 @@ import (
const (
binDir = "bin"
patternBin = "*_req.bin"
patternDissect = "*.json"
patternExpect = "*.json"
msgDissecting = "Dissecting:"
msgAnalyzing = "Analyzing:"
msgSummarizing = "Summarizing:"
msgRepresenting = "Representing:"
respSuffix = "_res.bin"
expectDir = "expect"
dissectDir = "dissect"
analyzeDir = "analyze"
summarizeDir = "summarize"
representDir = "represent"
testUpdate = "TEST_UPDATE"
)
@@ -187,7 +189,7 @@ func TestAnalyze(t *testing.T) {
}
dissector := NewDissector()
paths, err := filepath.Glob(path.Join(expectDirDissect, patternDissect))
paths, err := filepath.Glob(path.Join(expectDirDissect, patternExpect))
if err != nil {
log.Fatal(err)
}
@@ -231,6 +233,63 @@ func TestAnalyze(t *testing.T) {
}
}
func TestSummarize(t *testing.T) {
_, testUpdateEnabled := os.LookupEnv(testUpdate)
expectDirAnalyze := path.Join(expectDir, analyzeDir)
expectDirSummarize := path.Join(expectDir, summarizeDir)
if testUpdateEnabled {
os.RemoveAll(expectDirSummarize)
err := os.MkdirAll(expectDirSummarize, 0775)
assert.Nil(t, err)
}
dissector := NewDissector()
paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
if err != nil {
log.Fatal(err)
}
for _, _path := range paths {
fmt.Printf("%s %s\n", msgSummarizing, _path)
bytes, err := ioutil.ReadFile(_path)
assert.Nil(t, err)
var entries []*api.Entry
err = json.Unmarshal(bytes, &entries)
assert.Nil(t, err)
var baseEntries []*api.BaseEntry
for _, entry := range entries {
baseEntry := dissector.Summarize(entry)
baseEntries = append(baseEntries, baseEntry)
}
pathExpect := path.Join(expectDirSummarize, filepath.Base(_path))
marshaled, err := json.Marshal(baseEntries)
assert.Nil(t, err)
if testUpdateEnabled {
if len(baseEntries) > 0 {
err = os.WriteFile(pathExpect, marshaled, 0644)
assert.Nil(t, err)
}
} else {
if _, err := os.Stat(pathExpect); errors.Is(err, os.ErrNotExist) {
assert.Len(t, entries, 0)
} else {
expectedBytes, err := ioutil.ReadFile(pathExpect)
assert.Nil(t, err)
assert.JSONEq(t, string(expectedBytes), string(marshaled))
}
}
}
}
func TestRepresent(t *testing.T) {
_, testUpdateEnabled := os.LookupEnv(testUpdate)
@@ -244,7 +303,7 @@ func TestRepresent(t *testing.T) {
}
dissector := NewDissector()
paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternDissect))
paths, err := filepath.Glob(path.Join(expectDirAnalyze, patternExpect))
if err != nil {
log.Fatal(err)
}
@@ -13,4 +13,4 @@ test-pull-bin:
test-pull-expect:
@mkdir -p expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect/redis/\* expect
@[ "${skipexpect}" ] && echo "Skipping downloading expected JSONs" || gsutil -o 'GSUtil:parallel_process_count=5' -o 'GSUtil:parallel_thread_count=5' -m cp -r gs://static.up9.io/mizu/test-pcap/expect3/redis/\* expect