Parse JSON and count unique values (group by)

I have the JSON string below and I want to count the number of unique metrics. Is there an easy way to do it?
For example, from the string below I want to count the number of unique load generators; here it is 3:

load_generator_84, load_generator_86, load_generator_89

I want to do this with as little code as possible.

I can get the array like this:
records, ok := result["logEvents"].([]interface{})

After this I am thinking of using a regular expression to get the count (see the rough sketch below).

What do you think?
Using a for loop over all the values won't be efficient, I guess.
Any suggestions?
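
Roughly what I have in mind (an untested sketch, assuming regexp and fmt are imported and the whole payload is in a string called in):

re := regexp.MustCompile(`load_generator_\d+`)
unique := map[string]struct{}{}
for _, name := range re.FindAllString(in, -1) {
	unique[name] = struct{}{}
}
fmt.Println(len(unique)) // prints 3 for the string below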

The JSON string:

{
	"logEvents": [{
		"message": "{\"bufferLib\":\"Undefined\",\"_slack\":{\"lbMetrics\":[{\"Namespace\":\"default\",\"Dimensions\":[[\"item_index\",\"batch_index\",\"bufferLib\"],[\"bufferLib\"],[\"bufferLib\",\"item_index\"],[\"bufferLib\",\"batch_index\"]],\"Metrics\":[{\"Name\":\"load_generator_84\",\"Unit\":\"\"}]}],\"Timestamp\":1596838449873},\"batch_index\":\"batch_14185\",\"item_index\":\"item_2\",\"load_generator_84\":0}",
		"timestamp": 1596838449873
	}, {
		"message": "{\"bufferLib\":\"Undefined\",\"_slack\":{\"lbMetrics\":[{\"Namespace\":\"default\",\"Dimensions\":[[\"item_index\",\"batch_index\",\"bufferLib\"],[\"bufferLib\"],[\"bufferLib\",\"item_index\"],[\"bufferLib\",\"batch_index\"]],\"Metrics\":[{\"Name\":\"load_generator_84\",\"Unit\":\"\"}]}],\"Timestamp\":1596838449873},\"batch_index\":\"batch_14185\",\"item_index\":\"item_3\",\"load_generator_84\":0}",
		"timestamp": 1596838449873
	}, {
		"message": "{\"bufferLib\":\"Undefined\",\"_slack\":{\"lbMetrics\":[{\"Namespace\":\"default\",\"Dimensions\":[[\"item_index\",\"batch_index\",\"bufferLib\"],[\"bufferLib\"],[\"bufferLib\",\"item_index\"],[\"bufferLib\",\"batch_index\"]],\"Metrics\":[{\"Name\":\"load_generator_86\",\"Unit\":\"\"}]}],\"Timestamp\":1596838449873},\"batch_index\":\"batch_14185\",\"item_index\":\"item_4\",\"load_generator_86\":0}",
		"timestamp": 1596838449873
	}, {
		"message": "{\"bufferLib\":\"Undefined\",\"_slack\":{\"lbMetrics\":[{\"Namespace\":\"default\",\"Dimensions\":[[\"item_index\",\"batch_index\",\"bufferLib\"],[\"bufferLib\"],[\"bufferLib\",\"item_index\"],[\"bufferLib\",\"batch_index\"]],\"Metrics\":[{\"Name\":\"load_generator_89\",\"Unit\":\"\"}]}],\"Timestamp\":1596838449873},\"batch_index\":\"batch_14185\",\"item_index\":\"item_5\",\"load_generator_89\":0}",
		"timestamp": 1596838449873
	}, {
		"message": "{\"bufferLib\":\"Undefined\",\"_slack\":{\"lbMetrics\":[{\"Namespace\":\"default\",\"Dimensions\":[[\"item_index\",\"batch_index\",\"bufferLib\"],[\"bufferLib\"],[\"bufferLib\",\"item_index\"],[\"bufferLib\",\"batch_index\"]],\"Metrics\":[{\"Name\":\"load_generator_89\",\"Unit\":\"\"}]}],\"Timestamp\":1596838449873},\"batch_index\":\"batch_14185\",\"item_index\":\"item_6\",\"load_generator_89\":0}",
		"timestamp": 1596838449873
	}]
}

This seems to be JSON within JSON!

To be useful you’ll need to decode the message as JSON too.

I carried on parsing as map[string]interface{} etc. and came up with this (playground):

func main() {
	var data map[string]interface{}
	err := json.Unmarshal([]byte(in), &data)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(data)
	var foundNames = map[string]struct{}{}
	events := data["logEvents"].([]interface{})
	for _, event := range events {
		e := event.(map[string]interface{})
		message := e["message"].(string)
		var m map[string]interface{}
		err := json.Unmarshal([]byte(message), &m)
		if err != nil {
			log.Fatal(err)
		}
		slack := m["_slack"].(map[string]interface{})
		lbMetrics := slack["lbMetrics"].([]interface{})
		for _, lbMetric := range lbMetrics {
			metrics := lbMetric.(map[string]interface{})["Metrics"].([]interface{})
			for _, metric := range metrics {
				m := metric.(map[string]interface{})
				name := m["Name"].(string)
				fmt.Println(name)
				foundNames[name] = struct{}{}
			}
		}

	}
	fmt.Printf("found %d names", len(foundNames))

That is really ugly!

If I were doing this I'd parse the JSON properly, like this:

type Message struct {
	BufferLib string `json:"bufferLib"`
	Slack     struct {
		LbMetrics []struct {
			Namespace  string     `json:"Namespace"`
			Dimensions [][]string `json:"Dimensions"`
			Metrics    []struct {
				Name string `json:"Name"`
				Unit string `json:"Unit"`
			} `json:"Metrics"`
		} `json:"lbMetrics"`
		Timestamp int64 `json:"Timestamp"`
	} `json:"_slack"`
	BatchIndex      string `json:"batch_index"`
	ItemIndex       string `json:"item_index"`
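	// only matches the "load_generator_84" key; other load_generator_* keys in the messages are simply ignored by encoding/json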
	LoadGenerator84 int    `json:"load_generator_84"`
}

// A wrapper struct so we can attach the UnmarshalJSON method to it
type JSONMessage struct {
	Message
}

// Unmarshal the JSON in a string
func (m *JSONMessage) UnmarshalJSON(b []byte) error {
	var messageString string
	if err := json.Unmarshal(b, &messageString); err != nil {
		return err
	}
	if err := json.Unmarshal([]byte(messageString), &m.Message); err != nil {
		return err
	}
	return nil
}

type Events struct {
	LogEvents []struct {
		Message   JSONMessage `json:"message"`
		Timestamp int64       `json:"timestamp"`
	} `json:"logEvents"`
}

(Note the double struct trick: JSONMessage embeds Message, which hides the UnmarshalJSON method when we unmarshal the inner Message, so that inner json.Unmarshal call uses the default struct decoding rather than calling UnmarshalJSON again.)
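
For comparison (just a sketch, not what the playground code does), the same effect can be had without the wrapper by converting to a local alias type inside the method, in which case Events could hold a Message directly:

func (m *Message) UnmarshalJSON(b []byte) error {
	var s string
	if err := json.Unmarshal(b, &s); err != nil {
		return err
	}
	// plain has the same fields as Message but no methods, so this inner
	// call uses the default struct decoding instead of this method again.
	type plain Message
	return json.Unmarshal([]byte(s), (*plain)(m))
}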

Then the main loop becomes (playground):

func main() {
	var data Events
	err := json.Unmarshal([]byte(in), &data)
	if err != nil {
		log.Fatal(err)
	}
	var foundNames = map[string]struct{}{}
	for _, logEvent := range data.LogEvents {
		for _, lbMetric := range logEvent.Message.Slack.LbMetrics {
			for _, metric := range lbMetric.Metrics {
				fmt.Println(metric.Name)
				foundNames[metric.Name] = struct{}{}
			}
		}
	}
	fmt.Printf("found = %d\n", len(foundNames))
}
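
Running that against your sample prints each metric name and then found = 3, matching the three unique load generators you mentioned.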

Note that I used mholt’s excellent JSON to Go tool to make the struct definitions - you just paste JSON in and it creates a Go structure for it.


Thank you so much @ncw, this site is extremely useful to me.


I hope it all made sense!

Parsing JSON in Go can be a little harder than in other languages because of Go's static typing.

However, I find that pasting the JSON into JSON to Go to create a Go structure makes life a lot easier!

There is a similar tool for XML also.

