This repository has been archived by the owner on Nov 22, 2017. It is now read-only.

Make timestamp and default fields configurable #307

Status: Open. Wants to merge 1 commit into base: kibana-ruby.
KibanaConfig.rb (3 additions, 0 deletions)
@@ -49,6 +49,9 @@ module KibanaConfig
 # Default_fields = ['@fields.vhost','@fields.response','@fields.request']
 Default_fields = ['@message']
 
+# Field containing timestamps
+Timestamp_field = "@timestamp"
+
 # If set to true, Kibana will use the Highlight feature of Elasticsearch to
 # display highlighted search results
 Highlight_results = true
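With this constant in place, an operator whose events are not indexed with Logstash's default @timestamp can point Kibana at the right field from KibanaConfig.rb. A minimal sketch, with hypothetical field names that depend entirely on how your events are indexed:

    module KibanaConfig
      # Fields shown by default when a query does not name any explicitly
      Default_fields = ['@fields.program', '@message']

      # Field Kibana sorts and builds histograms on; swap in whatever
      # field actually holds your event time
      Timestamp_field = '@fields.logged_at'
    end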
lib/kibana-app.rb (4 additions, 4 deletions)
@@ -104,15 +104,15 @@ def link_to url_fragment, mode=:full_url
 req = ClientRequest.new(params[:hash])
 
 query_end = SortedQuery.new(
-  req.search,req.from,req.to,0,limit,'@timestamp','desc')
+  req.search,req.from,req.to,0,limit,KibanaConfig::Timestamp_field,'desc')
 indices_end = Kelastic.index_range(req.from,req.to)
 result_end = KelasticMulti.new(query_end,indices_end)
 
 # Oh snaps. too few results for full limit analysis, rerun with less
 if (result_end.response['hits']['hits'].length < limit)
   limit = (result_end.response['hits']['hits'].length / 2).to_i
   query_end = SortedQuery.new(
-    req.search,req.from,req.to,0,limit,'@timestamp','desc')
+    req.search,req.from,req.to,0,limit,KibanaConfig::Timestamp_field,'desc')
   indices_end = Kelastic.index_range(req.from,req.to)
   result_end = KelasticMulti.new(query_end,indices_end)
 end
@@ -122,7 +122,7 @@ def link_to url_fragment, mode=:full_url
 count_end = KelasticResponse.count_field(result_end.response,fields)
 
 query_begin = SortedQuery.new(
-  req.search,req.from,req.to,0,limit,'@timestamp','asc')
+  req.search,req.from,req.to,0,limit,KibanaConfig::Timestamp_field,'asc')
 indices_begin = Kelastic.index_range(req.from,req.to).reverse
 result_begin = KelasticMulti.new(query_begin,indices_begin)
 count_begin = KelasticResponse.count_field(result_begin.response,fields)
@@ -273,7 +273,7 @@ def link_to url_fragment, mode=:full_url
 i = m.items.new_item
 hash = IdRequest.new(hit['_id'],hit['_index']).hash
 i.title = KelasticResponse.flatten_hit(hit,req.fields).join(', ')
-i.date = Time.iso8601(KelasticResponse.get_field_value(hit,'@timestamp'))
+i.date = Time.iso8601(KelasticResponse.get_field_value(hit,KibanaConfig::Timestamp_field))
 i.link = link_to("/##{hash}")
 i.description = "<pre>#{hit.to_yaml}</pre>"
 end
lib/query.rb (5 additions, 5 deletions)
@@ -39,7 +39,7 @@ def initialize(question, from = nil, to = nil)
 # Build the filter part
 @filter = {
   "range" => {
-    "@timestamp" => {
+    KibanaConfig::Timestamp_field => {
       "from" => from,
       "to" => to
     }
@@ -94,7 +94,7 @@ def initialize(id)
 =end
 class SortedQuery < Query
   attr_accessor :query,:from,:to
-  def initialize(question, from, to, offset = 0, size = KibanaConfig::Per_page, field = "@timestamp", order = "desc")
+  def initialize(question, from, to, offset = 0, size = KibanaConfig::Per_page, field = KibanaConfig::Timestamp_field, order = "desc")
     super(question, from, to)
     @query['from'] = offset
     @query['size'] = size
@@ -120,7 +120,7 @@ def initialize(question, from, to, offset = 0, size = KibanaConfig::Per_page, fi
 =end
 class HighlightedQuery < Query
   attr_accessor :query,:from,:to
-  def initialize(question, from, to, offset = 0, size = KibanaConfig::Per_page, field = "@timestamp", order = "desc")
+  def initialize(question, from, to, offset = 0, size = KibanaConfig::Per_page, field = KibanaConfig::Timestamp_field, order = "desc")
     super(question, from, to)
     @query['from'] = offset
     @query['size'] = size
@@ -149,7 +149,7 @@ def initialize(question, from, to, offset = 0, size = KibanaConfig::Per_page, fi
 order:: desc/asc
 =end
 class DateHistogram < Query
-  def initialize(question, from, to, interval, field = '@timestamp')
+  def initialize(question, from, to, interval, field = KibanaConfig::Timestamp_field)
     super(question, from, to)
     @query['facets'] = {
       "count" => {
@@ -237,7 +237,7 @@ def initialize(question, from, to, field)
 field:: Field to analyze
 =end
 class StatsHistogram < Query
-  def initialize(question, from, to, field, interval, key_field = '@timestamp')
+  def initialize(question, from, to, field, interval, key_field = KibanaConfig::Timestamp_field)
     super(question, from, to)
     @query['facets'] = {
       "mean" => {
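Because the query classes now default their field argument to KibanaConfig::Timestamp_field, callers can drop the hard-coded '@timestamp' entirely. A rough usage sketch, assuming KibanaConfig and lib/query.rb are already loaded as in kibana-app.rb; the search string, time range, and '10m' interval are made-up illustrative inputs, not values from this patch:

    from = Time.now - 3600
    to   = Time.now

    # Sorts on KibanaConfig::Timestamp_field, newest first, without the
    # caller having to know which field that is
    recent = SortedQuery.new('some search terms', from, to)

    # Buckets counts on the same configured field
    histogram = DateHistogram.new('some search terms', from, to, '10m')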
public/lib/js/ajax.js (2 additions, 1 deletion)
@@ -1,3 +1,4 @@
+
 $(document).ready(function () {
 
   // Bind all click/change/whatever handlers
@@ -855,7 +856,7 @@ function CreateLogTable(objArray, fields, theme, enableHeader) {
   var id = object._id;
   var alt = i % 2 == 0 ? '' : 'alt'
   var time = prettyDateString(
-    Date.parse(get_field_value(object,'@timestamp')) + tOffset);
+    Date.parse(get_field_value(object,window.timestamp_field)) + tOffset);
   str += '<tr data-object="' + objid + '" id="logrow_' + objid + '" '+
     'class="' + alt + ' logrow">';
 
public/lib/js/stream.js (3 additions, 3 deletions)
@@ -33,7 +33,7 @@ function pageload(hash) {
   window.hashjson = JSON.parse(Base64.decode(hash));
 
   window.hashjson.fields = window.hashjson.fields.length > 0 ?
-    window.hashjson.fields : new Array('@message');
+    window.hashjson.fields : window.default_fields;
 
   $('#query h4').text(window.hashjson.search);
 
@@ -71,7 +71,7 @@ function getStream() {
   id = hit['_id']
   index = hit['_index']
   if (!(has_time)) {
-    window.last_time = get_field_value(hit,'@timestamp');
+    window.last_time = get_field_value(hit,window.timestamp_field);
     has_time = true;
   }
   if ($('#logrow_' + id).length == 0) {
@@ -90,7 +90,7 @@ function getStream() {
 
   var jlink = $('<a/>').addClass('jlink').attr('href', "../#" + hash).html($('<i/>').addClass('icon-link'));
   var linkTableData = $("<td/>").css('white-space', 'nowrap');
-  linkTableData.text(prettyDateString(Date.parse(get_field_value(hit,'@timestamp')) + tOffset)).prepend(jlink);
+  linkTableData.text(prettyDateString(Date.parse(get_field_value(hit,window.timestamp_field)) + tOffset)).prepend(jlink);
   tableRow.append(linkTableData);
   for (var field in fields) {
     tableRow.append($("<td/>").text(get_field_value(hit,fields[field])));
views/timezone.erb (4 additions, 1 deletion)
@@ -10,4 +10,7 @@ if (tmp_offset == 'user') {
   window.tOffset = -d.getTimezoneOffset() * 60 * 1000;
 } else {
   window.tOffset = parseFloat(tmp_offset) * 3600 * 1000;
-}
+}
+
+window.default_fields = [ "<%= KibanaConfig::Default_fields.join("\", \"") %>" ];
+window.timestamp_field = "<%= KibanaConfig::Timestamp_field %>";
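For reference, with the stock values in KibanaConfig.rb above (Default_fields = ['@message'], Timestamp_field = "@timestamp"), these two template lines should render to roughly:

    window.default_fields = [ "@message" ];
    window.timestamp_field = "@timestamp";

which is what ajax.js and stream.js now read in place of their previous hard-coded '@timestamp' and '@message' literals.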