input {
  file {
    type => "json"
    # a codec with options must use the unquoted plugin name
    codec => json {
      charset => "UTF-8"
    }
    path => "/test.json"
    start_position => "beginning"
    sincedb_path => "/dev/null"
  }
}
filter {
  json {
    source => "message"
  }
  # split the records array into one event per record
  split {
    field => "[records]"
  }
  # copy each record's fields up to the top level of the event
  if [records][so2Grade] {
    mutate {
      add_field => {
        "so2Grade" => "%{[records][so2Grade]}"
        "pm25Grade1h" => "%{[records][pm25Grade1h]}"
        "pm10Value24" => "%{[records][pm10Value24]}"
        "khaiValue" => "%{[records][khaiValue]}"
        "so2Value" => "%{[records][so2Value]}"
        "coValue" => "%{[records][coValue]}"
        "pm10Grade1h" => "%{[records][pm10Grade1h]}"
        "o3Grade" => "%{[records][o3Grade]}"
        "pm10Value" => "%{[records][pm10Value]}"
        "khaiGrade" => "%{[records][khaiGrade]}"
        "pm25Value" => "%{[records][pm25Value]}"
        "no2Grade" => "%{[records][no2Grade]}"
        "pm25Value24" => "%{[records][pm25Value24]}"
        "pm25Grade" => "%{[records][pm25Grade]}"
        "mangName" => "%{[records][mangName]}"
        "coGrade" => "%{[records][coGrade]}"
        "dataTime" => "%{[records][dataTime]}"
        "no2Value" => "%{[records][no2Value]}"
        "pm10Grade" => "%{[records][pm10Grade]}"
        "o3Value" => "%{[records][o3Value]}"
      }
      # drop the original fields/records arrays once the values are copied
      remove_field => [ "[fields]", "[records]" ]
    }
  }
  # convert the copied fields to their final types
  mutate {
    convert => {
      "so2Grade" => "float"
      "pm25Grade1h" => "float"
      "pm10Value24" => "float"
      "khaiValue" => "float"
      "so2Value" => "float"
      "coValue" => "float"
      "pm10Grade1h" => "float"
      "o3Grade" => "float"
      "pm10Value" => "float"
      "khaiGrade" => "float"
      "pm25Value" => "float"
      "no2Grade" => "float"
      "pm25Value24" => "float"
      "pm25Grade" => "float"
      "coGrade" => "float"
      "no2Value" => "float"
      "pm10Grade" => "float"
      "o3Value" => "float"
      "mangName" => "string"
    }
  }
}
output {
  stdout { codec => json }
  file {
    codec => json_lines
    path => "/output_test.json"
  }
  elasticsearch {
    hosts => "localhost"
    index => "atmosphere"
  }
}
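This pipeline (and the revised one below) assumes the source JSON file has top-level "fields" and "records" keys, with "records" holding one object per measurement. The key names are taken from the filters above; the values in the sketch below are illustrative only, and only a few of the measurement fields are shown.

{
  "fields": ["dataTime", "mangName", "pm10Value", "pm25Value", "khaiGrade"],
  "records": [
    {
      "dataTime": "2019-03-20 11:00",
      "mangName": "도시대기",
      "pm10Value": "45",
      "pm25Value": "28",
      "khaiGrade": "2"
    }
  ]
}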
======================================================
input {
  file {
    type => "json"
    #codec => "json"
    codec => "plain"
    path => "/media/sf_Class/jptest/jongrogu20190320.json"
    start_position => "beginning"
    sincedb_path => "/dev/null"
  }
}
filter {
  json {
    source => "message"
  }
  # split the records array into one event per record
  split {
    field => "[records]"
  }
  # copy each record's fields up to the top level of the event
  mutate {
    # drop the original fields/records arrays (removal runs after add_field)
    remove_field => [ "[fields]", "[records]" ]
    add_field => {
      "so2Grade" => "%{[records][so2Grade]}"
      "pm25Grade1h" => "%{[records][pm25Grade1h]}"
      "pm10Value24" => "%{[records][pm10Value24]}"
      "khaiValue" => "%{[records][khaiValue]}"
      "so2Value" => "%{[records][so2Value]}"
      "coValue" => "%{[records][coValue]}"
      "pm10Grade1h" => "%{[records][pm10Grade1h]}"
      "o3Grade" => "%{[records][o3Grade]}"
      "pm10Value" => "%{[records][pm10Value]}"
      "khaiGrade" => "%{[records][khaiGrade]}"
      "pm25Value" => "%{[records][pm25Value]}"
      "no2Grade" => "%{[records][no2Grade]}"
      "pm25Value24" => "%{[records][pm25Value24]}"
      "pm25Grade" => "%{[records][pm25Grade]}"
      "mangName" => "%{[records][mangName]}"
      "coGrade" => "%{[records][coGrade]}"
      "dataTime" => "%{[records][dataTime]}"
      "no2Value" => "%{[records][no2Value]}"
      "pm10Grade" => "%{[records][pm10Grade]}"
      "o3Value" => "%{[records][o3Value]}"
    }
  }
  # validation: strip any non-numeric characters before the float conversion
  mutate {
    gsub => [
      "so2Grade", "[^0-9.]", "",
      "pm25Grade1h", "[^0-9.]", "",
      "pm10Value24", "[^0-9.]", "",
      "khaiValue", "[^0-9.]", "",
      "so2Value", "[^0-9.]", "",
      "coValue", "[^0-9.]", "",
      "pm10Grade1h", "[^0-9.]", "",
      "o3Grade", "[^0-9.]", "",
      "pm10Value", "[^0-9.]", "",
      "khaiGrade", "[^0-9.]", "",
      "pm25Value", "[^0-9.]", "",
      "no2Grade", "[^0-9.]", "",
      "pm25Value24", "[^0-9.]", "",
      "pm25Grade", "[^0-9.]", "",
      "coGrade", "[^0-9.]", "",
      "no2Value", "[^0-9.]", "",
      "pm10Grade", "[^0-9.]", "",
      "o3Value", "[^0-9.]", ""
    ]
  }
  # convert the measurement fields to float
  mutate {
    convert => {
      "so2Grade" => "float"
      "pm25Grade1h" => "float"
      "pm10Value24" => "float"
      "khaiValue" => "float"
      "so2Value" => "float"
      "coValue" => "float"
      "pm10Grade1h" => "float"
      "o3Grade" => "float"
      "pm10Value" => "float"
      "khaiGrade" => "float"
      "pm25Value" => "float"
      "no2Grade" => "float"
      "pm25Value24" => "float"
      "pm25Grade" => "float"
      "coGrade" => "float"
      "no2Value" => "float"
      "pm10Grade" => "float"
      "o3Value" => "float"
    }
  }
  # use dataTime as the event @timestamp
  date {
    match => [ "dataTime", "yyyy-MM-dd HH:mm" ]
  }
}
output {
  stdout { codec => rubydebug }
  file {
    codec => json_lines
    path => "/media/sf_Class/jptest/output_test.json"
  }
  # elasticsearch {
  #   hosts => "localhost"
  #   index => "atmosphere"
  # }
}
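To try the revised pipeline, it can be syntax-checked and then run from the Logstash home directory. The config file name jongrogu.conf below is only an example; substitute whatever name the file is actually saved under.

# check the configuration for syntax errors without starting the pipeline
bin/logstash -f jongrogu.conf --config.test_and_exit

# run the pipeline; it re-reads the file from the beginning on every run
# because start_position is "beginning" and sincedb_path is /dev/null
bin/logstash -f jongrogu.conf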