Create one doc per timestamp with this JSON

Hi, I have this JSON and I need to create two docs, one per timestamp. Is that possible?

[
{
"timestamp": "23:55:48T30-JAN-2019",
"filename": "xxxxx.nmon",
"host": "xxxxx",
"start_date": "30-JAN-2019",
"start_time": "00:01:12",
"serial": "xxxxx",
"lpar_name": "xxxxx",
"machine_type": "xxxxx",
"runname": "xxxxx",
"hardware": "xxxxx",
"OS": "AIX:6.1.9.2",
"user": "root",
"nmon_version": "topas_nmon",
"interval": 180,
"snapshots": 480,
"cpu01_user": 39.0,
"cpu01_sys": 9.9,
"cpu01_wait": 0.7,
"cpu01_idle": 50.5,
"cpu02_user": 23.4,
"cpu02_sys": 3.8,
"cpu02_wait": 0.4,
"cpu02_idle": 72.4,
"cpu03_user": 3.6,
"cpu03_sys": 1.0,
"cpu03_wait": 0.0,
"cpu03_idle": 95.3,
"cpu04_user": 3.5,
"cpu04_sys": 1.0,
"cpu04_wait": 0.0,
"cpu04_idle": 95.6,
"cpu_all_user": 14.0,
"cpu_all_sys": 3.2,
"cpu_all_wait": 0.2,
"cpu_all_idle": 82.6,
"lpar_phyiscal_cpu": 2.228,
"lpar_virtual_cpu": 12,
"lpar_logical_cpu": 48,
"lpar_pool_cpu_cpu": 50,
"lpar_entitled": 8.00,
"lpar_weight": 128,
"lpar_pool_idle": 0.00,
"lpar_used_all_cpu": 3.38,
"lpar_used_pool_cpu": 4.46,
"lpar_shared_flg": 1,
"lpar_capped_flg": 0,
"lpar_folded": 0,
"mem_real_free_mb": 23095.6,
"mem_virtual_free_mb": 61572.1,
"mem_virtual_size_mb": 61792.0,
"mem_real_size_mb": 151552.0,
"memnew_process": 60.2,
"memnew_fs_cache": 20.6,
"memnew_system": 3.9,
"memnew_free": 15.2,
"memnew_pinned": 5.7,
"memnew_user": 77.9,
"memuse_numperm": 20.6,
"memuse_minperm": 10.0,
"memuse_maxperm": 90.0,
"memuse_minfree": 960,
"memuse_maxfree": 1088,
"memuse_numclient": 20.3,
"memuse_maxyclient": 90.0,
"memuse_lruable_pages": 37646432.0,
"page_faults": 2244.7,
"page_pgin": 102.5,
"page_pgout": 62.6,
"page_pgsin": 0.0,
"page_pgsout": 0.0,
"page_reclaims": 0.0,
"page_scans": 0.0,
"page_cycles": 0.0,
"largepage_freepages": 0,
"largepage_usedpages": 0,
"largepage_pages": 0,
"largepage_highwater": 0,
"largepage_size_mb": 0,
"proc_runnable": 21.48,
"proc_swapin": 0.05,
"proc_pswitch": 30885,
"proc_syscall": 53912,
"proc_read": 1584,
"proc_write": 1388,
"proc_fork": 8,
"proc_exec": 8,
"proc_sem": 298,
"proc_msg": 0,
"proc_asleep_bufio": 0,
"proc_asleep_rawio": 0,
"proc_asleep_diocio": 0,
"file_iget": 0,
"file_namei": 5926,
"file_dirblk": 50,
"file_readch": 4395707,
"file_writech": 1509877,
"file_ttyrawch": 0,
"file_ttycanch": 0,
"file_ttyoutch": 0,
"net_total_KBs": 3407.6,
"net_total_packets": 4720.7,
"disk_read_KBs": 0.0,
"disk_write_KBs": 245.6,
"disk_xfers": 30.1,
"aio_procs": 0,
"aio_running": 0,
"aio_cpu": 0.0
},
{
"timestamp": "23:58:48T30-JAN-2019",
"filename": "xxxxx.nmon",
"host": "xxxxx",
"start_date": "30-JAN-2019",
"start_time": "00:01:12",
"serial": "xxxxx",
"lpar_name": "xxxxx",
"machine_type": "xxxxx",
"runname": "xxxxx",
"hardware": "xxxxx",
"OS": "AIX:6.1.9.2",
"user": "root",
"nmon_version": "topas_nmon",
"interval": 180,
"snapshots": 480,
"cpu01_user": 36.2,
"cpu01_sys": 10.4,
"cpu01_wait": 0.2,
"cpu01_idle": 53.3,
"cpu02_user": 19.8,
"cpu02_sys": 3.6,
"cpu02_wait": 0.1,
"cpu02_idle": 76.5,
"cpu03_user": 2.8,
"cpu03_sys": 0.9,
"cpu03_wait": 0.0,
"cpu03_idle": 96.3,
"cpu04_user": 2.7,
"cpu04_sys": 0.7,
"cpu04_wait": 0.0,
"cpu04_idle": 96.6,
"cpu_all_user": 12.7,
"cpu_all_sys": 3.1,
"cpu_all_wait": 0.1,
"cpu_all_idle": 84.1,
"lpar_phyiscal_cpu": 2.069,
"lpar_virtual_cpu": 12,
"lpar_logical_cpu": 48,
"lpar_pool_cpu_cpu": 50,
"lpar_entitled": 8.00,
"lpar_weight": 128,
"lpar_pool_idle": 0.00,
"lpar_used_all_cpu": 3.14,
"lpar_used_pool_cpu": 4.14,
"lpar_shared_flg": 1,
"lpar_capped_flg": 0,
"lpar_folded": 0,
"mem_real_free_mb": 23012.3,
"mem_virtual_free_mb": 61572.1,
"mem_virtual_size_mb": 61792.0,
"mem_real_size_mb": 151552.0,
"memnew_process": 60.3,
"memnew_fs_cache": 20.6,
"memnew_system": 3.9,
"memnew_free": 15.2,
"memnew_pinned": 5.7,
"memnew_user": 78.0,
"memuse_numperm": 20.6,
"memuse_minperm": 10.0,
"memuse_maxperm": 90.0,
"memuse_minfree": 960,
"memuse_maxfree": 1088,
"memuse_numclient": 20.3,
"memuse_maxyclient": 90.0,
"memuse_lruable_pages": 37646432.0,
"page_faults": 1504.6,
"page_pgin": 6.3,
"page_pgout": 51.6,
"page_pgsin": 0.0,
"page_pgsout": 0.0,
"page_reclaims": 0.0,
"page_scans": 0.0,
"page_cycles": 0.0,
"largepage_freepages": 0,
"largepage_usedpages": 0,
"largepage_pages": 0,
"largepage_highwater": 0,
"largepage_size_mb": 0,
"proc_runnable": 21.28,
"proc_swapin": 0.01,
"proc_pswitch": 30291,
"proc_syscall": 53734,
"proc_read": 1412,
"proc_write": 984,
"proc_fork": 3,
"proc_exec": 3,
"proc_sem": 303,
"proc_msg": 0,
"proc_asleep_bufio": 0,
"proc_asleep_rawio": 0,
"proc_asleep_diocio": 0,
"file_iget": 0,
"file_namei": 6848,
"file_dirblk": 19,
"file_readch": 2595113,
"file_writech": 870537,
"file_ttyrawch": 0,
"file_ttycanch": 0,
"file_ttyoutch": 0,
"net_total_KBs": 4020.5,
"net_total_packets": 5339.7,
"disk_read_KBs": 0.0,
"disk_write_KBs": 204.0,
"disk_xfers": 30.1,
"aio_procs": 0,
"aio_running": 0,
"aio_cpu": 0.0
}
]

If you ingest the entire JSON as a single event, then yes: you can parse it with a json filter and use a split filter to divide the array into two events.

    json { source => "message" target => "someField" remove_field => [ "message" ] }
    split { field => "someField" }

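For context, here is how those two filters might sit in a complete pipeline. This is a minimal sketch, assuming the whole array arrives as a single event (for example, as one line on stdin); the stdin input, stdout output, and the someField name are placeholders to adapt.

    # Minimal pipeline sketch, assuming the whole JSON array arrives as a
    # single event (e.g. one line on stdin). Plugin and field choices are examples.
    input {
      stdin { }
    }

    filter {
      # Parse the array out of the raw message into a temporary field...
      json  { source => "message" target => "someField" remove_field => [ "message" ] }
      # ...then emit one event per array element (here: one per timestamp).
      split { field => "someField" }
    }

    output {
      # Print each resulting document so you can verify there are two of them.
      stdout { codec => rubydebug }
    }

After the split, each of the two events holds one of the objects under someField, so the per-timestamp values are referenced as, for example, [someField][timestamp] or [someField][cpu_all_user].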