elasticsearch logstash elastic-stack logstash-jdbc

Aggregate multiple recursive nested objects in Logstash


I am using Logstash with the jdbc input and would like to embed one object inside another using the aggregate filter. How can I add objects recursively?

That is, how do I add an object inside another object?

This would be an example of the desired document:

{
  "_index": "my-index",
  "_type": "test",
  "_id": "1",
  "_version": 1,
  "_score": 1,
  "_source": {
    "id": "1",
    "properties": {
      "nested_1": [
        {
          "A": 0,
          "B": "true",
          "C": "PEREZ, MATIAS  ROGELIO Y/O",
          "Nested_2": [
            {
              "Z1": "true",
              "Z2": "99999"
            }
          ]
        },
        {
          "A": 0,
          "B": "true",
          "C": "SALVADOR MATIAS ROMERO",
          "Nested_2": [
            {
              "Z1": "true",
              "Z2": "99999"
            }
          ]
        }
      ]
    }
  }
}

I'm using something like this, but it doesn't work:

aggregate {
  task_id => "%{id}"
  code => "
      map['id'] = event.get('id')
      
      map['nested_1_list'] ||= []
      map['nested_1'] ||= []
      if (event.get('id') != nil)
        if !( map['nested_1_list'].include?event.get('id') ) 
          map['nested_1_list'] << event.get('id')
 
          map['nested_1'] << {
            'A' => event.get('a'),                             
            'B' => event.get('b'),
            'C' => event.get('c'),
            
             map['nested_2_list'] ||= []
              map['nested_2'] ||= []
              if (event.get('id_2') != nil)
                if !( map['nested_2_list'].include?event.get('id_2') ) 
                  map['nested_2_list'] << event.get('id_2')
         
                  map['nested_2'] << {
                    'Z1' => event.get('z1'), 
                    'Z2' => event.get('z2')
                  }
                end
              end
          }
        end
      end
       
      event.cancel()
  "
  push_previous_map_as_event => true
  timeout => 3

} 

Any idea how to implement this?
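As a side note, one reason the snippet above fails is that a Ruby hash literal cannot contain statements, so the map['nested_2'] bookkeeping cannot live inside the nested_1 hash. A minimal restructuring sketch (assuming each row also exposes a hypothetical id_1 column that identifies the nested_1 entry, next to a, b, c, id_2, z1 and z2) builds every nested_1 entry with an empty Nested_2 array first and keeps it addressable so later rows can append to it:

aggregate {
  task_id => "%{id}"
  code => "
    map['id'] ||= event.get('id')

    # lookup of nested_1 entries already created, keyed by the hypothetical id_1
    map['nested_1_index'] ||= {}
    map['nested_1'] ||= []

    id_1 = event.get('id_1')   # hypothetical column identifying the nested_1 row
    unless id_1.nil?
      entry = map['nested_1_index'][id_1]
      if entry.nil?
        entry = {
          'A' => event.get('a'),
          'B' => event.get('b'),
          'C' => event.get('c'),
          'Nested_2' => []
        }
        map['nested_1_index'][id_1] = entry
        map['nested_1'] << entry
      end

      # attach the second level to the entry built (or found) above;
      # simplistic de-duplication on Z2, adapt to whatever really identifies a Nested_2 row
      id_2 = event.get('id_2')
      unless id_2.nil? || entry['Nested_2'].any? { |n| n['Z2'] == event.get('z2') }
        entry['Nested_2'] << { 'Z1' => event.get('z1'), 'Z2' => event.get('z2') }
      end
    end

    event.cancel()
  "
  push_previous_map_as_event => true
  timeout => 3
}

The nested_1_index helper also ends up in the pushed event, so it would normally be dropped afterwards, for example with a mutate remove_field filter.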


Solution

  • Finally, what I did was generate the JSON on the database side: a stored procedure builds it, it is exposed through a view (vw), and the statement of the Logstash jdbc input consumes that view (see the input sketch after the filter below).

    Once the row is consumed, I parse that string field as JSON, and from then on I can work with it like any other field.

    # Convert the string into real JSON (it strips the quotes and backslashes)
    ruby {
      code => "
        require 'json'
        json_value = JSON.parse(event.get('field_db').to_s)
        event.set('field_convert_to_json', json_value)
      "
    }
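
    For completeness, the input side of that pipeline would look roughly like the sketch below. All names here are assumptions: the connection settings, the driver class and the view vw_my_index_json are placeholders; the only real requirement is that the statement returns the pre-built JSON string in a column named field_db, which is what the ruby filter above parses.

    input {
      jdbc {
        # placeholder connection settings; adjust the driver and URL to the actual database
        jdbc_connection_string => "jdbc:sqlserver://localhost:1433;databaseName=mydb"
        jdbc_driver_class => "com.microsoft.sqlserver.jdbc.SQLServerDriver"
        jdbc_user => "user"
        jdbc_password => "password"
        # the view (fed by the stored procedure) already returns the whole nested
        # document serialized as a JSON string in the field_db column
        statement => "SELECT id, field_db FROM vw_my_index_json"
        schedule => "* * * * *"
      }
    }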