我正在使用 Kibana 4,它在字段列表中显示了 .raw 字段(例如 result.raw 以及 result)。
我检查了我的 Elasticsearch 索引映射,映射中也确实包含这些 raw 字段。
那么为什么 Kibana 没有显示那些 .raw 字段的任何值?(它显示过去 15 天的数据;但昨天我在 Logstash 配置文件中加入了一些 grok 模式,从那时起它就不再显示这些值了。现在,过去数据的 .raw 字段也没有值;我添加的 grok 模式本身工作正常,解析出的字段有值,唯独 .raw 字段没有。)
curl -XGET 'localhost:9200/logstash-2015.09.25?pretty'
{
"logstash-2015.09.25" : {
"aliases" : { },
"mappings" : {
"_default_" : {
"dynamic_templates" : [ {
"message_field" : {
"mapping" : {
"index" : "analyzed",
"omit_norms" : true,
"type" : "string"
},
"match" : "message",
"match_mapping_type" : "string"
}
}, {
"string_fields" : {
"mapping" : {
"index" : "analyzed",
"omit_norms" : true,
"type" : "string",
"fields" : {
"raw" : {
"index" : "not_analyzed",
"ignore_above" : 256,
"type" : "string"
}
}
},
"match" : "*",
"match_mapping_type" : "string"
}
} ],
"_all" : {
"enabled" : true,
"omit_norms" : true
},
"properties" : {
"@version" : {
"type" : "string",
"index" : "not_analyzed"
},
"geoip" : {
"dynamic" : "true",
"properties" : {
"location" : {
"type" : "geo_point"
}
}
}
}
},
"scan_production" : {
"dynamic_templates" : [ {
"message_field" : {
"mapping" : {
"index" : "analyzed",
"omit_norms" : true,
"type" : "string"
},
"match" : "message",
"match_mapping_type" : "string"
}
}, {
"string_fields" : {
"mapping" : {
"index" : "analyzed",
"omit_norms" : true,
"type" : "string",
"fields" : {
"raw" : {
"index" : "not_analyzed",
"ignore_above" : 256,
"type" : "string"
}
}
},
"match" : "*",
"match_mapping_type" : "string"
}
} ],
"_all" : {
"enabled" : true,
"omit_norms" : true
},
"properties" : {
"@timestamp" : {
"type" : "date",
"format" : "dateOptionalTime"
},
"@version" : {
"type" : "string",
"index" : "not_analyzed"
},
"command" : {
"type" : "string",
"norms" : {
"enabled" : false
},
"fields" : {
"raw" : {
"type" : "string",
"index" : "not_analyzed",
"ignore_above" : 256
}
}
},
"file" : {
"type" : "string",
"norms" : {
"enabled" : false
},
"fields" : {
"raw" : {
"type" : "string",
"index" : "not_analyzed",
"ignore_above" : 256
}
}
},
"geoip" : {
"dynamic" : "true",
"properties" : {
"location" : {
"type" : "geo_point"
}
}
},
"host" : {
"type" : "string",
"norms" : {
"enabled" : false
},
"fields" : {
"raw" : {
"type" : "string",
"index" : "not_analyzed",
"ignore_above" : 256
}
}
},
"id" : {
"type" : "string",
"norms" : {
"enabled" : false
},
"fields" : {
"raw" : {
"type" : "string",
"index" : "not_analyzed",
"ignore_above" : 256
}
}
},
"message" : {
"type" : "string",
"norms" : {
"enabled" : false
}
},
"message_type" : {
"type" : "string",
"norms" : {
"enabled" : false
},
"fields" : {
"raw" : {
"type" : "string",
"index" : "not_analyzed",
"ignore_above" : 256
}
}
},
"offset" : {
"type" : "string",
"norms" : {
"enabled" : false
},
"fields" : {
"raw" : {
"type" : "string",
"index" : "not_analyzed",
"ignore_above" : 256
}
}
},
"received_at" : {
"type" : "date",
"format" : "dateOptionalTime"
},
"received_from" : {
"type" : "string",
"norms" : {
"enabled" : false
},
"fields" : {
"raw" : {
"type" : "string",
"index" : "not_analyzed",
"ignore_above" : 256
}
}
},
"result" : {
"type" : "string",
"norms" : {
"enabled" : false
},
"fields" : {
"raw" : {
"type" : "string",
"index" : "not_analyzed",
"ignore_above" : 256
}
}
},
"severity" : {
"type" : "string",
"norms" : {
"enabled" : false
},
"fields" : {
"raw" : {
"type" : "string",
"index" : "not_analyzed",
"ignore_above" : 256
}
}
},
"tags" : {
"type" : "string",
"norms" : {
"enabled" : false
},
"fields" : {
"raw" : {
"type" : "string",
"index" : "not_analyzed",
"ignore_above" : 256
}
}
},
"timestamp" : {
"type" : "date",
"format" : "dateOptionalTime"
},
"type" : {
"type" : "string",
"norms" : {
"enabled" : false
},
"fields" : {
"raw" : {
"type" : "string",
"index" : "not_analyzed",
"ignore_above" : 256
}
}
}
}
},
"settings" : {
"index" : {
"creation_date" : "1443139268796",
"uuid" : "qJyyA60ZSpGY2CuBfoG8JQ",
"number_of_replicas" : "1",
"number_of_shards" : "5",
"refresh_interval" : "5s",
"version" : {
"created" : "1040599"
}
}
},
"warmers" : { }
}
}
cat /etc/logstash/conf.d/lumberjack.conf
input {
lumberjack {
port => 5000
type => "logs"
ssl_certificate => "/etc/pki/tls/certs/logstash-forwarder.crt"
ssl_key => "/etc/pki/tls/private/logstash-forwarder.key"
}
}
filter {
if [type] == "scan_production" {
if "LISTING_SCRAPER SUCCESS" in [message]{
grok{
match => { "message" => "(?<severity>[E]|[W]|%{GREEDYDATA})\, +\[(?<timestamp>%{TIMESTAMP_ISO8601}) \#(?<id>%{INT})\] +%{WORD:message_type} \-\- \: (?<command>%{DATA}\:|%{DATA}\:%{NOTSPACE}) %{NOTSPACE:site_name} \location: (?<location_id>%{INT}|%{SPACE}) time\:\ %{BASE10NUM:site_access_time:float}" }
add_field => [ "received_at", "%{@timestamp}" ]
add_field => [ "received_from", "%{host}" ]
}
mutate {
convert => ["site_access_time", "float"]
}
}
else if "LISTING_CRAWLER SUCCESS site" in [message]{
grok{
match => { "message" => "(?<severity>[E]|[W]|%{GREEDYDATA})\, +\[(?<timestamp>%{TIMESTAMP_ISO8601}) \#(?<id>%{INT})\] +%{WORD:message_type} \-\- \: (?<command>%{DATA}\:|%{DATA}\:%{NOTSPACE}) %{NOTSPACE:site_name} \location: (?<location_id>%{INT}|%{SPACE}) time\:\ %{BASE10NUM:site_access_time:float} items\: %{BASE10NUM:location_iteam:float}" }
add_field => [ "received_at", "%{@timestamp}" ]
add_field => [ "received_from", "%{host}" ]
}
mutate {
convert => ["site_access_time", "float"]
}
}
else{
grok {
match => { "message" => "(?<severity>[E]|[W]|%{GREEDYDATA})\, +\[(?<timestamp>%{TIMESTAMP_ISO8601}) \#(?<id>%{INT})\] +%{WORD:message_type} \-\- \: (?<command>%{DATA}\:|%{DATA}\:%{NOTSPACE}) %{GREEDYDATA:result}" }
add_field => [ "received_at", "%{@timestamp}" ]
add_field => [ "received_from", "%{host}" ]
}
}
}
}
output {
elasticsearch { host => "localhost"
protocol => "http"
}
stdout { codec => rubydebug }
}