--
-- (C) 2013-15 - ntop.org
--
require "lua_utils"
-- "Top" RRDs that are always offered in the timeseries menu:
-- maps the RRD file name on disk to its human-readable label.
-- Files listed here are skipped by getProtoVolume()/navigatedir(),
-- which only enumerate the per-protocol RRDs.
top_rrds = {
["bytes.rrd"] = "Traffic",
["packets.rrd"] = "Packets",
["drops.rrd"] = "Packet Drops",
["num_flows.rrd"] = "Active Flows",
["num_hosts.rrd"] = "Active Hosts",
["num_http_hosts.rrd"] = "Active HTTP Servers"
}
-- ########################################################
-- Pro builds: extend the module search path and pull in the pro graph
-- helpers (provides drawProGraph, used by drawRRD below).
if(ntop.isPro()) then
package.path = dirs.installdir .. "/pro/scripts/lua/modules/?.lua;" .. package.path
require "nv_graph_utils"
end
-- ########################################################
-- Computes, for each per-protocol RRD of the given interface, the total
-- (accumulated) value over [start_time, end_time].
-- "Top" RRDs (bytes, packets, ...) listed in top_rrds are skipped.
--
-- @param ifName     interface name, resolved to an id via getInterfaceId()
-- @param start_time fetch interval start (epoch or RRD time specification)
-- @param end_time   fetch interval end
-- @return table mapping protocol name (rrd file name without the ".rrd"
--         suffix) to its accumulated volume; zero-traffic rrds are omitted.
function getProtoVolume(ifName, start_time, end_time)
   local ifId = getInterfaceId(ifName)
   local path = fixPath(dirs.workingdir .. "/" .. ifId .. "/rrd/")
   local rrds = ntop.readdir(path)
   local ret = { }

   for rrdFile,_ in pairs(rrds) do
      -- only consider .rrd files that are not among the "top" rrds
      if((string.ends(rrdFile, ".rrd")) and (top_rrds[rrdFile] == nil)) then
	 local rrdname = getRRDName(ifId, nil, rrdFile)

	 if(ntop.notEmptyFile(rrdname)) then
	    local fstart, fstep, fnames, fdata = ntop.rrd_fetch(rrdname, 'AVERAGE', start_time, end_time)

	    if(fstart ~= nil) then
	       local accumulated = 0

	       for _, row in ipairs(fdata) do
		  for _, w in ipairs(row) do
		     -- NaN (w ~= w), non-numeric and negative samples count as zero.
		     -- The original clobbered the row variable here, so only the
		     -- last column of each row was accumulated.
		     local value = tonumber(w)
		     if((value == nil) or (value ~= value) or (value < 0)) then
			value = 0
		     end
		     accumulated = accumulated + value
		  end
	       end

	       if(accumulated > 0) then
		  -- strip the trailing ".rrd" from the key
		  rrdFile = string.sub(rrdFile, 1, string.len(rrdFile)-4)
		  ret[rrdFile] = accumulated
	       end
	    end
	 end
      end
   end

   return(ret)
end
-- ########################################################
-- Scans an RRD directory tree (optionally recursing into subdirs when
-- go_deep is set) and returns a table mapping display labels to rrd
-- paths relative to base; when print_html is set it also prints the
-- corresponding HTML menu entries.
-- NOTE(review): several print('...') string literals below are split
-- across lines — this looks like extraction damage of the original HTML.
function navigatedir(url, label, base, path, go_deep, print_html, ifid, host, start_time, end_time)
local shown = false
local to_skip = false
local ret = { }
local do_debug = false
local printed = false
-- io.write(debug.traceback().."\n")
rrds = ntop.readdir(path)
table.sort(rrds)
for k,v in pairsByKeys(rrds, asc) do
if(v ~= nil) then
p = fixPath(path .. "/" .. v)
if(ntop.isdir(p)) then
if(go_deep) then
-- NOTE(review): this recursive call passes 9 arguments to a
-- 10-parameter function (go_deep is missing), so print_html is
-- received as go_deep and all following arguments shift — confirm.
r = navigatedir(url, label.."/"..v, base, p, print_html, ifid, host, start_time, end_time)
for k,v in pairs(r) do
ret[k] = v
if(do_debug) then print(v.." \n") end
end
end
else
-- only list rrds that actually carry some traffic in the interval
rrd = singlerrd2json(ifid, host, v, start_time, end_time, true)
if((rrd.totalval ~= nil) and (rrd.totalval > 0)) then
if(top_rrds[v] == nil) then
if(label == "*") then
to_skip = true
else
if(not(shown) and not(to_skip)) then
if(print_html) then
if(not(printed)) then print('
\n')
end
shown = true
end
end
end
-- path relative to base: becomes the rrd_file URL parameter
what = string.sub(path.."/"..v, string.len(base)+2)
-- NOTE(review): reassigning the 'label' parameter here changes the
-- label used on subsequent iterations — confirm this is intended.
label = string.sub(v, 1, string.len(v)-4)
label = l4Label(string.gsub(label, "_", " "))
ret[label] = what
if(do_debug) then print(what.." \n") end
if(print_html) then
if(not(printed)) then print('\n') printed = true end
print("
\n")
end
end
end
end
end
end
if(shown) then
if(print_html) then print('
\n') end
end
return(ret)
end
-- ########################################################
-- Prints a two-segment HTML percentage bar showing the sent vs received
-- traffic breakdown; prints a placeholder when both counters are zero.
-- NOTE(review): the HTML markup inside print() appears truncated by
-- extraction (string literals split across lines).
function breakdownBar(sent, sentLabel, rcvd, rcvdLabel)
if((sent+rcvd) > 0) then
-- percentage of the total that was sent, rounded to an integer
sent2rcvd = round((sent * 100) / (sent+rcvd), 0)
print('
'..sentLabel)
print('
' .. rcvdLabel .. '
')
else
print(' ')
end
end
-- ########################################################
-- Prints an HTML bar representing value as a percentage of total;
-- prints a placeholder when total is nil or zero.
-- NOTE(review): the HTML markup inside print() appears truncated by
-- extraction (string literals split across lines).
function percentageBar(total, value, valueLabel)
if((total ~= nil) and (total > 0)) then
-- integer percentage of total represented by value
pctg = round((value * 100) / total, 0)
print('
'..valueLabel)
print('
')
else
print(' ')
end
end
-- ########################################################
-- Builds the full path of an RRD file for an interface and, optionally,
-- a host, network or traffic profile.
--
-- host_or_network: host or network name.
-- If network, must be prefixed with 'net:'
-- If profile, must be prefixed with 'profile:'
--
-- @param ifid    interface id (directory component under workingdir)
-- @param rrdFile rrd file name appended to the computed directory
-- @return full rrd path
function getRRDName(ifid, host_or_network, rrdFile)
   local rrdname

   if host_or_network ~= nil and string.starts(host_or_network, 'net:') then
      -- strip only the leading 'net:' tag (gsub would also rewrite any
      -- other occurrence inside the name)
      host_or_network = string.sub(host_or_network, string.len('net:') + 1)
      rrdname = fixPath(dirs.workingdir .. "/" .. ifid .. "/subnetstats/")
   elseif host_or_network ~= nil and string.starts(host_or_network, 'profile:') then
      host_or_network = string.sub(host_or_network, string.len('profile:') + 1)
      rrdname = fixPath(dirs.workingdir .. "/" .. ifid .. "/profilestats/")
   else
      rrdname = fixPath(dirs.workingdir .. "/" .. ifid .. "/rrd/")
   end

   if(host_or_network ~= nil) then
      rrdname = rrdname .. getPathFromKey(host_or_network) .. "/"
   end

   return(rrdname..rrdFile)
end
-- ########################################################
-- Available graph zoom levels. Each entry is:
--   { UI label, RRD "start" time specification, covered duration in seconds }
zoom_vals = {
{ "1m", "now-60s", 60 },
{ "5m", "now-300s", 60*5 },
{ "10m", "now-600s", 60*10 },
{ "1h", "now-1h", 60*60*1 },
{ "3h", "now-3h", 60*60*3 },
{ "6h", "now-6h", 60*60*6 },
{ "12h", "now-12h", 60*60*12 },
{ "1d", "now-1d", 60*60*24 },
{ "1w", "now-1w", 60*60*24*7 },
{ "2w", "now-2w", 60*60*24*14 },
{ "1M", "now-1mon", 60*60*24*31 },
{ "6M", "now-6mon", 60*60*24*31*6 },
{ "1Y", "now-1y", 60*60*24*366 }
}
-- Returns the zoom label located pos_offset entries away from cur_zoom
-- inside zoom_vals (negative offset = more zoomed in). Returns cur_zoom
-- unchanged when cur_zoom is unknown or the target is out of range.
function getZoomAtPos(cur_zoom, pos_offset)
   local new_zoom_level = cur_zoom

   for pos, zoom in ipairs(zoom_vals) do
      if(zoom[1] == cur_zoom) then
	 local target = pos + pos_offset
	 -- the upper bound was hardcoded to "< 13", which both assumed
	 -- 13 zoom levels and excluded the last entry: derive it instead
	 if(target >= 1 and target <= #zoom_vals) then
	    new_zoom_level = zoom_vals[target][1]
	 end
	 break
      end
   end

   return new_zoom_level
end
-- ########################################################
-- Returns the time span (in seconds) covered by the given zoom label,
-- or 180 seconds when the label is not present in zoom_vals.
function getZoomDuration(cur_zoom)
   for _, zoom in ipairs(zoom_vals) do
      if(zoom[1] == cur_zoom) then
	 return(zoom[3])
      end
   end

   return(180) -- unknown zoom level: default to 3 minutes
end
-- ########################################################
-- Maps a zoom label to its duration in seconds. A nil label defaults
-- to "1h"; an unknown label yields 3600.
function zoomLevel2sec(zoomLevel)
   local level = zoomLevel
   if(level == nil) then level = "1h" end

   for _, zoom in ipairs(zoom_vals) do
      if(zoom[1] == level) then
	 return(zoom[3])
      end
   end

   return(3600) -- fallback for labels missing from zoom_vals
end
-- ########################################################
-- Renders a small inline (peity-style) graph for the given rrd: picks
-- the time window from zoomLevel/selectedEpoch, samples the fetched
-- datapoints down, and prints the values as a comma-separated list.
-- NOTE(review): several print() string literals below are split across
-- lines — extraction damage of the original HTML.
function drawPeity(ifid, host, rrdFile, zoomLevel, selectedEpoch)
rrdname = getRRDName(ifid, host, rrdFile)
if(zoomLevel == nil) then
zoomLevel = "1h"
end
nextZoomLevel = zoomLevel;
epoch = tonumber(selectedEpoch);
-- derive start/end time from the selected zoom level; when an epoch is
-- selected the window is centered on it
for k,v in ipairs(zoom_vals) do
if(zoom_vals[k][1] == zoomLevel) then
if(k > 1) then
nextZoomLevel = zoom_vals[k-1][1]
end
if(epoch) then
start_time = epoch - zoom_vals[k][3]/2
end_time = epoch + zoom_vals[k][3]/2
else
start_time = zoom_vals[k][2]
end_time = "now"
end
end
end
--print("=> Found "..rrdname.."
\n")
if(ntop.notEmptyFile(rrdname)) then
--io.write("=> Found ".. start_time .. "|" .. end_time .. "
\n")
-- NOTE(review): the argument 'start_time..", end_time.."' looks garbled —
-- presumably this should be the two arguments start_time, end_time; confirm.
local fstart, fstep, fnames, fdata = ntop.rrd_fetch(rrdname, 'AVERAGE', start_time..", end_time..")
if(fstart ~= nil) then
local max_num_points = 512 -- This is to avoid having too many points and thus a fat graph
local num_points_found = table.getn(fdata)
local sample_rate = round(num_points_found / max_num_points)
local num_points = 0
local step = 1
local series = {}
if(sample_rate < 1) then
sample_rate = 1
end
-- print("=> "..num_points_found.."[".. sample_rate .."]["..fstart.."]
")
id = 0
num = 0
total = 0
sample_rate = sample_rate-1
points = {}
for i, v in ipairs(fdata) do
timestamp = fstart + (i-1)*fstep
num_points = num_points + 1
local elemId = 1
for _, w in ipairs(v) do
-- NOTE(review): the NaN/negative handling below overwrites the row
-- variable 'v' rather than the sample 'w' — confirm intended.
if(w ~= w) then
-- This is a NaN
v = 0
else
v = tonumber(w)
if(v < 0) then
v = 0
end
end
value = v*8 -- bps
total = total + value
-- keep one point every sample_rate samples
if(id == sample_rate) then
points[num] = round(value)..""
num = num+1
id = 0
else
id = id + 1
end
elemId = elemId + 1
end
end
end
end
print("
"..round(total).."
")
for i=0,10 do
if(i > 0) then print(",") end
print(points[i])
end
print("\n")
end
-- ########################################################
-- Main graph page renderer: prints the HTML/JS for an rrd graph with
-- its timeseries menu, zoom buttons and (optionally) a "top" table.
-- Delegates to drawProGraph() on pro builds.
-- NOTE(review): many print() string literals below are split across
-- lines — extraction damage of the original HTML/JS markup.
function drawRRD(ifid, host, rrdFile, zoomLevel, baseurl, show_timeseries,
selectedEpoch, selected_epoch_sanitized, topArray)
local debug_rrd = false
if(zoomLevel == nil) then zoomLevel = "1h" end
if((selectedEpoch == nil) or (selectedEpoch == "")) then
-- Refresh the page every minute unless a specific epoch has been selected
print("\n");
end
-- pro builds render the graph with the nv_graph_utils helper instead
if ntop.isPro() then
_ifstats = interface.getStats()
if(_ifstats.isView == true) then topArray = nil end
drawProGraph(ifid, host, rrdFile, zoomLevel, baseurl, show_timeseries, selectedEpoch, selected_epoch_sanitized, topArray)
return
end
dirs = ntop.getDirs()
rrdname = getRRDName(ifid, host, rrdFile)
names = {}
series = {}
if(zoomLevel == nil) then
zoomLevel = "1h"
end
nextZoomLevel = zoomLevel;
epoch = tonumber(selectedEpoch);
-- derive start/end time from the zoom level; when an epoch is selected
-- the window is centered on it
for k,v in ipairs(zoom_vals) do
if(zoom_vals[k][1] == zoomLevel) then
if(k > 1) then
nextZoomLevel = zoom_vals[k-1][1]
end
if(epoch) then
start_time = epoch - zoom_vals[k][3]/2
end_time = epoch + zoom_vals[k][3]/2
else
start_time = zoom_vals[k][2]
end_time = "now"
end
end
end
prefixLabel = l4Label(string.gsub(rrdFile, ".rrd", ""))
-- io.write(prefixLabel.."\n")
if(prefixLabel == "Bytes") then
prefixLabel = "Traffic"
end
if(ntop.notEmptyFile(rrdname)) then
print [[
]]
if(show_timeseries == 1) then
print [[
]]
-- menu entries for the always-shown "top" rrds that carry traffic
for k,v in pairs(top_rrds) do
rrdname = getRRDName(ifid, host, k)
if(ntop.notEmptyFile(rrdname)) then
rrd = singlerrd2json(ifid, host, k, start_time, end_time, true)
if((rrd.totalval ~= nil) and (rrd.totalval > 0)) then
print('
\n')
end
end
end
dirs = ntop.getDirs()
p = dirs.workingdir .. "/" .. purifyInterfaceName(ifid) .. "/rrd/"
if(host ~= nil) then
p = p .. getPathFromKey(host)
end
d = fixPath(p)
-- list the per-protocol rrds (non-recursively) as menu entries
go_deep = false
navigatedir(baseurl .. '&graph_zoom=' .. zoomLevel .. '&epoch=' .. (selectedEpoch or '')..'&rrd_file=',
"*", d, d, go_deep, true, ifid, host, start_time, end_time)
print [[
]]
end -- show_timeseries == 1
print(' Timeframe:
\n')
for k,v in ipairs(zoom_vals) do
-- display 1 minute button only for networks and interface stats
-- but exclude applications. Application statistics are gathered
-- every 5 minutes
local net_or_profile = false
if host and (string.starts(host, 'net:') or string.starts(host, 'profile:')) then
net_or_profile = true
end
if zoom_vals[k][1] == '1m' and (not net_or_profile and not top_rrds[rrdFile]) then
goto continue
end
print('\n')
::continue::
end
print [[
NOTE: Click on the graph to zoom.
]]
-- pick the JS value formatter based on the metric type
if(string.contains(rrdFile, "num_")) then
formatter_fctn = "fint"
else
formatter_fctn = "fpackets"
end
if (topArray ~= nil) then
print [[
]]
print('
Time
Value
\n')
rrd = rrd2json(ifid, host, rrdFile, start_time, end_time, true, false) -- the latest true means: expand_interface_views
if(string.contains(rrdFile, "num_") or string.contains(rrdFile, "packets") or string.contains(rrdFile, "drops")) then
print('
")
end
end
-- ########################################################
-- Creates (only if missing) a DERIVE counter RRD with the standard
-- ntopng interface RRAs (raw, 1 min, 1 h and 1 d resolutions, with
-- retention taken from the preferences).
-- @param name full path of the rrd file
-- @param step sampling step in seconds
-- @param ds   data source name
function create_rrd(name, step, ds)
   if(ntop.exists(name)) then return end

   if(enable_second_debug == 1) then io.write('Creating RRD ', name, '\n') end

   local prefs = ntop.getPrefs()
   ntop.rrd_create(
      name,
      step, -- step
      'DS:' .. ds .. ':DERIVE:5:U:U',
      'RRA:AVERAGE:0.5:1:'..tostring(prefs.intf_rrd_raw_days*24*60*60), -- raw samples
      'RRA:AVERAGE:0.5:60:'..tostring(prefs.intf_rrd_1min_days*24*60), -- 1 min resolution
      'RRA:AVERAGE:0.5:3600:'..tostring(prefs.intf_rrd_1h_days*24), -- 1 h resolution
      'RRA:AVERAGE:0.5:86400:'..tostring(prefs.intf_rrd_1d_days) -- 1 d resolution
      -- 'RRA:HWPREDICT:1440:0.1:0.0035:20'
   )
end
-- Creates (only if missing) a GAUGE RRD with step 1, used for the
-- "num_*" metrics (flows, hosts, ...), with retention taken from the
-- preferences.
-- @param name full path of the rrd file
-- @param ds   data source name
function create_rrd_num(name, ds)
   if(ntop.exists(name)) then return end

   if(enable_second_debug == 1) then io.write('Creating RRD ', name, '\n') end

   local prefs = ntop.getPrefs()
   ntop.rrd_create(
      name,
      1, -- step
      'DS:' .. ds .. ':GAUGE:5:0:U',
      'RRA:AVERAGE:0.5:1:'..tostring(prefs.intf_rrd_raw_days*24*60*60), -- raw samples
      'RRA:AVERAGE:0.5:3600:'..tostring(prefs.intf_rrd_1h_days*24), -- 1 h resolution
      'RRA:AVERAGE:0.5:86400:'..tostring(prefs.intf_rrd_1d_days) -- 1 d resolution
      -- 'RRA:HWPREDICT:1440:0.1:0.0035:20'
   )
end
-- Creates (if missing) and updates the RRD <rrdname>.rrd under basedir.
-- "num_*" metrics are stored as gauges, everything else as DERIVE counters.
-- @param basedir directory holding the rrd
-- @param ifname  interface name, only used in debug output
-- @param rrdname rrd base name (also used as the data source name)
-- @param step    update step in seconds, forwarded to create_rrd
-- @param value   value to store (converted with tolongint)
function makeRRD(basedir, ifname, rrdname, step, value)
   local name = fixPath(basedir .. "/" .. rrdname .. ".rrd")

   if(string.contains(rrdname, "num_")) then
      create_rrd_num(name, rrdname)
   else
      -- the step parameter was previously ignored (hardcoded to 1)
      create_rrd(name, step or 1, rrdname)
   end

   ntop.rrd_update(name, "N:".. tolongint(value))
   if(enable_second_debug == 1) then io.write('Updating RRD ['.. ifname..'] '.. name .. " " .. value ..'\n') end
end
-- Creates (only if missing) a two-DS (sent/rcvd) DERIVE counter RRD at
-- the given path, with retention taken from the preferences.
-- @param path    full path of the rrd file
-- @param step    sampling step in seconds
-- @param verbose when truthy, prints a creation message
function createRRDcounter(path, step, verbose)
   if(ntop.exists(path)) then return end

   if(verbose) then print('Creating RRD ', path, '\n') end

   local prefs = ntop.getPrefs()
   ntop.rrd_create(
      path,
      step, -- step
      'DS:sent:DERIVE:600:U:U',
      'DS:rcvd:DERIVE:600:U:U',
      'RRA:AVERAGE:0.5:1:'..tostring(prefs.other_rrd_raw_days*24*300), -- raw samples
      'RRA:AVERAGE:0.5:12:'..tostring(prefs.other_rrd_1h_days*24), -- 1 h resolution
      'RRA:AVERAGE:0.5:288:'..tostring(prefs.other_rrd_1d_days) -- 1 d resolution
      --'RRA:HWPREDICT:1440:0.1:0.0035:20'
   )
end
-- ########################################################
-- Creates (only if missing) a single-DS ("num") DERIVE counter RRD at
-- the given path, including a Holt-Winters prediction RRA.
-- @param path    full path of the rrd file
-- @param step    sampling step in seconds
-- @param verbose when truthy, prints a creation message
function createSingleRRDcounter(path, step, verbose)
   if(ntop.exists(path)) then return end

   if(verbose) then print('Creating RRD ', path, '\n') end

   local prefs = ntop.getPrefs()
   ntop.rrd_create(
      path,
      step, -- step
      'DS:num:DERIVE:600:U:U',
      'RRA:AVERAGE:0.5:1:'..tostring(prefs.other_rrd_raw_days*24*300), -- raw samples
      'RRA:AVERAGE:0.5:12:'..tostring(prefs.other_rrd_1h_days*24), -- 1 h resolution
      'RRA:AVERAGE:0.5:288:'..tostring(prefs.other_rrd_1d_days), -- 1 d resolution
      'RRA:HWPREDICT:1440:0.1:0.0035:20')
end
-- ########################################################
-- this method will be very likely used when saving subnet rrd traffic statistics
-- Creates (only if missing) a three-DS (ingress/egress/inner) DERIVE
-- counter RRD at the given path, with retention from the preferences.
-- @param path    full path of the rrd file
-- @param step    sampling step in seconds
-- @param verbose when truthy, prints a creation message
function createTripleRRDcounter(path, step, verbose)
   if(ntop.exists(path)) then return end

   if(verbose) then io.write('Creating RRD '..path..'\n') end

   local prefs = ntop.getPrefs()
   ntop.rrd_create(
      path,
      step, -- step
      'DS:ingress:DERIVE:600:U:U',
      'DS:egress:DERIVE:600:U:U',
      'DS:inner:DERIVE:600:U:U',
      'RRA:AVERAGE:0.5:1:'..tostring(prefs.other_rrd_raw_days*24*300), -- raw samples
      'RRA:AVERAGE:0.5:12:'..tostring(prefs.other_rrd_1h_days*24), -- 1 h resolution
      'RRA:AVERAGE:0.5:288:'..tostring(prefs.other_rrd_1d_days) -- 1 d resolution
      --'RRA:HWPREDICT:1440:0.1:0.0035:20'
   )
end
-- ########################################################
-- Dumps the counters found in host[label] as single-valued RRDs under
-- basedir/label/..., handling one or two levels of table nesting.
-- Does nothing when host[label] is nil.
-- @param basedir base directory for the rrd tree
-- @param label   key into the host table and first path component
-- @param host    table holding the counters (host[label][k][k1][k2?])
-- @param verbose when truthy, prints each updated rrd path
function dumpSingleTreeCounters(basedir, label, host, verbose)
   -- creates <dname>/<leaf>.rrd (and the directory) if missing, then updates it
   local function dump_counter(dname, leaf, value)
      if(not(ntop.exists(dname))) then
	 ntop.mkdir(dname)
      end

      local fname = dname..fixPath("/"..leaf..".rrd")
      createSingleRRDcounter(fname, 300, verbose)
      ntop.rrd_update(fname, "N:"..toint(value))
      if(verbose) then print("\t"..fname.."\n") end
   end

   local what = host[label]
   if(what == nil) then return end

   for k,v in pairs(what) do
      for k1,v1 in pairs(v) do
	 -- print("-->"..k1.."/".. type(v1).."<--\n")
	 if(type(v1) == "table") then
	    -- two nesting levels: basedir/label/k/k1/k2.rrd
	    for k2,v2 in pairs(v1) do
	       dump_counter(fixPath(basedir.."/"..label.."/"..k.."/"..k1), k2, v2)
	    end
	 else
	    -- single nesting level: basedir/label/k/k1.rrd
	    dump_counter(fixPath(basedir.."/"..label.."/"..k), k1, v1)
	 end
      end
   end
end
-- Prints the top flows table for the time window ending at the given
-- epoch and spanning the zoomLevel duration. Silently returns when the
-- flows DB is not available.
-- @param epoch     end of the window (defaults to now when nil/empty)
-- @param zoomLevel zoom label, converted to seconds via getZoomDuration()
function printGraphTopFlows(ifId, host, epoch, zoomLevel, l7proto)
   -- Check if the DB is enabled
   local rsp = interface.execSQLQuery("show tables")
   if(rsp == nil) then return end

   if((epoch == nil) or (epoch == "")) then epoch = os.time() end

   local d = getZoomDuration(zoomLevel)
   local epoch_end = epoch
   local epoch_begin = epoch - d

   printTopFlows(ifId, host, epoch_begin, epoch_end, l7proto, '', '', '', 5, 5)
end
-- Prints the HTML/JS datatables listing the top IPv4/IPv6 flows stored
-- in the flows DB for the given time interval and filters.
-- NOTE(review): many print() string literals below are split across
-- lines — extraction damage of the original HTML/JS markup.
function printTopFlows(ifId, host, epoch_begin, epoch_end, l7proto, l4proto, port, info, limitv4, limitv6)
-- base URL of the JSON endpoint feeding the datatables
url_update = "/lua/get_db_flows.lua?ifId="..ifId.. "&host="..(host or '') .. "&epoch_begin="..epoch_begin.."&epoch_end="..epoch_end.."&l4proto="..l4proto.."&port="..port.."&info="..info
if(l7proto ~= "") then
if(not(isnumber(l7proto))) then
local id
-- io.write(l7proto.."\n")
-- translate an application name (possibly an rrd file name) into its nDPI id
l7proto = string.gsub(l7proto, "%.rrd", "")
if(string.ends(l7proto, ".rrd")) then l7proto = string.sub(l7proto, 1, -5) end
id = interface.getnDPIProtoId(l7proto)
if(id ~= -1) then
l7proto = id
title = "Top "..l7proto.." Flows"
else
l7proto = ""
end
end
if(l7proto ~= "") then
url_update = url_update.."&l7proto="..l7proto
end
end
if((host == "") and (l4proto == "") and (port == "")) then
title = "Top Flows ["..formatEpoch(epoch_begin).." - "..formatEpoch(epoch_end).."]"
else
title = ""
end
print [[
]]
-- IPv4 hosts cannot have IPv6 flows: disable the IPv6 table
if(host ~= nil) then
local chunks = {host:match("(%d+)%.(%d+)%.(%d+)%.(%d+)")}
if(#chunks == 4) then
limitv6="0"
end
end
selected = false
if(not((limitv4 == nil) or (limitv4 == "") or (limitv4 == "0"))) then
print('
]]
if(not((limitv4 == nil) or (limitv4 == "") or (limitv4 == "0"))) then
print [[
]]
end
if(not((limitv6 == nil) or (limitv6 == "") or (limitv6 == "0"))) then
if(selected == false) then print('
\n') else print('
\n') end
print('
')
end
print [[
") return end
print [[
var url_update6 = "]] print(url_update.."&limit="..limitv6) print [[&version=6";
var graph_options6 = {
url: url_update6,
perPage: 5, ]]
if(title ~= "") then print('title: "IPv6 '..title..'",\n') else print("title: '',\n") end
print [[
showFilter: true,
showPagination: true,
sort: [ [ "BYTES","desc"] ],
columns: [
{
title: "Key",
field: "idx",
hidden: true,
},
]]
if(ntop.isPro()) then
print [[
{
title: "",
field: "FLOW_URL",
sortable: false,
css: {
textAlign: 'center'
}
},
]]
end
print [[
{
title: "Application",
field: "L7_PROTO",
sortable: true,
css: {
textAlign: 'center'
}
},
{
title: "L4 Proto",
field: "PROTOCOL",
sortable: true,
css: {
textAlign: 'center'
}
},
{
title: "Client",
field: "CLIENT",
sortable: false,
},
{
title: "Server",
field: "SERVER",
sortable: false,
},
{
title: "Begin",
field: "FIRST_SWITCHED",
sortable: true,
css: {
textAlign: 'center'
}
},
{
title: "End",
field: "LAST_SWITCHED",
sortable: true,
css: {
textAlign: 'center'
}
},
{
title: "Bytes",
field: "BYTES",
sortable: true,
css: {
textAlign: 'right'
}
},
{
title: "Avg Thpt",
field: "AVG_THROUGHPUT",
sortable: false,
css: {
textAlign: 'right'
}
}
]
};
var table6 = $("#table-flows6").datatable(graph_options6);
]]
end
-- ########################################################
-- reads one or more RRDs and returns a json suitable to feed rickshaw
-- Reads one RRD and returns a table with aggregate statistics plus a
-- JSON string of the (down-sampled) timeseries, suitable to feed
-- rickshaw (rickshaw_json == true) or NVD3 (false).
-- @param ifid          interface id
-- @param host          optional host/network/profile key (see getRRDName)
-- @param rrdFile       rrd file name
-- @param start_time    fetch interval start
-- @param end_time      fetch interval end
-- @param rickshaw_json selects the output JSON flavour
-- @return '{}' when the rrd is missing/empty, otherwise a table with
--         maxval/minval/lastval (+ their times), totalval, percentile,
--         average and the serialized series in .json
function singlerrd2json(ifid, host, rrdFile, start_time, end_time, rickshaw_json)
local rrdname = getRRDName(ifid, host, rrdFile)
local names = {}
local names_cache = {}
local series = {}
-- NOTE(review): "." is a pattern magic character here, so this matches
-- any char followed by "rrd", not just the literal ".rrd" — confirm.
local prefixLabel = l4Label(string.gsub(rrdFile, ".rrd", ""))
-- with a scaling factor we can stretch or shrink rrd values
-- by default we set this to a value of 8, in order to convert bytes
-- rrds into bits.
local scaling_factor = 8
--io.write(prefixLabel.."\n")
if(prefixLabel == "Bytes") then
prefixLabel = "Traffic"
end
if(string.contains(rrdFile, "num_") or string.contains(rrdFile, "packets") or string.contains(rrdFile, "drops"))
then
-- do not scale number, packets, and drops
scaling_factor = 1
end
if(not ntop.notEmptyFile(rrdname)) then return '{}' end
local fstart, fstep, fnames, fdata = ntop.rrd_fetch(rrdname, 'AVERAGE', start_time, end_time)
if(fstart == nil) then return '{}' end
--[[
io.write('start time: '..start_time..' end_time: '..end_time..'\n')
io.write('fstart: '..fstart..' fstep: '..fstep..' rrdname: '..rrdname..'\n')
io.write('len(fdata): '..table.getn(fdata)..'\n')
--]]
local max_num_points = 600 -- This is to avoid having too many points and thus a fat graph
local num_points_found = table.getn(fdata)
local sample_rate = round(num_points_found / max_num_points)
if(sample_rate < 1) then sample_rate = 1 end
-- build one display name per distinct data source
for i, n in ipairs(fnames) do
-- handle duplicates
if (names_cache[n] == nil) then
names_cache[n] = true
names[#names+1] = prefixLabel.." ("..getInterfaceName(ifid)
if host then names[#names] = names[#names]..', '..firstToUpper(n) end
names[#names] = names[#names]..")"
end
end
local minval, maxval, lastval = 0, 0, 0
local maxval_time, minval_time, lastval_time = nil, nil, nil
local sampling = 1
local s = {}
local totalval = {}
for i, v in ipairs(fdata) do
local instant = fstart + (i-1)*fstep -- this is the instant in time corresponding to the datapoint
s[0] = instant -- s holds the instant and all the values
totalval[instant] = 0 -- totalval holds the sum of all values of this instant
local elemId = 1
for _, w in ipairs(v) do
if(w ~= w) then
-- This is a NaN
w = 0
else
--io.write(w.."\n")
w = tonumber(w)
if(w < 0) then
w = 0
end
end
-- update the total value counter, which is the non-scaled integral over time
totalval[instant] = totalval[instant] + w * fstep
-- and the scaled current value (remember that these are derivatives)
w = w * scaling_factor
-- the scaled current value w goes into its own element elemId
if (s[elemId] == nil) then s[elemId] = 0 end
s[elemId] = s[elemId] + w
--if(s[elemId] > 0) then io.write("[".. elemId .. "]=" .. s[elemId] .."\n") end
elemId = elemId + 1
end
-- stops every sample_rate samples, or when there are no more points
if(sampling == sample_rate or num_points_found == i) then
for elemId=1,#s do
-- calculate the average in the sampling period
s[elemId] = s[elemId] / sampling
end
-- update last instant
if lastval_time == nil or instant > lastval_time then lastval_time = instant end
series[#series+1] = s
sampling = 1
s = {}
else
sampling = sampling + 1
end
end
-- get maximum and minimum values straight from the totals table
maxval_time, maxval = tmax(totalval)
minval_time, minval = tmin(totalval)
-- remember that the totals table does not contain scaled data.
-- therefore we must perform a scaling of these values
-- NOTE(review): lastval_time stays nil when fdata is empty, which would
-- make this indexing fail — presumably fdata is never empty here; confirm.
lastval = totalval[lastval_time] * scaling_factor
minval = minval * scaling_factor
maxval = maxval * scaling_factor
local tot = 0
for k, v in pairs(totalval) do tot = tot + v end
totalval = tot
local percentile = 0.95*maxval
local average = totalval / num_points_found
-- palette assigned to the series, in order
local colors = {
'#1f77b4',
'#ff7f0e',
'#2ca02c',
'#d62728',
'#9467bd',
'#8c564b',
'#e377c2',
'#7f7f7f',
'#bcbd22',
'#17becf',
-- https://github.com/mbostock/d3/wiki/Ordinal-Scales
'#ff7f0e',
'#ffbb78',
'#1f77b4',
'#aec7e8',
'#2ca02c',
'#98df8a',
'#d62728',
'#ff9896',
'#9467bd',
'#c5b0d5',
'#8c564b',
'#c49c94',
'#e377c2',
'#f7b6d2',
'#7f7f7f',
'#c7c7c7',
'#bcbd22',
'#dbdb8d',
'#17becf',
'#9edae5'
}
if(names ~= nil) then
json_ret = ''
if(rickshaw_json) then
-- rickshaw flavour: one {"name", color, "data": [{x, y}...]} per series
for elemId=1,#names do
if(elemId > 1) then
json_ret = json_ret.."\n,\n"
end
local name = names[elemId]
json_ret = json_ret..'{"name": "'.. name .. '",\n'
json_ret = json_ret..'color: \''.. colors[elemId] ..'\',\n'
json_ret = json_ret..'"data": [\n'
n = 0
for key, value in pairs(series) do
if(n > 0) then
json_ret = json_ret..',\n'
end
json_ret = json_ret..'\t{ "x": '.. value[0] .. ', "y": '.. value[elemId] .. '}'
n = n + 1
end
json_ret = json_ret.."\n]}\n"
end
else
-- NV3
-- NVD3 flavour: one {"key", "area", "values": [[x, y]...]} per series
local num_entries = 0;
for elemId=1,#names do
num_entries = num_entries + 1
if(elemId > 1) then
json_ret = json_ret.."\n,\n"
end
name = names[elemId]
json_ret = json_ret..'{"key": "'.. name .. '",\n'
-- json_ret = json_ret..'"color": "'.. colors[num_entries] ..'",\n'
json_ret = json_ret..'"area": true,\n'
json_ret = json_ret..'"values": [\n'
n = 0
for key, value in pairs(series) do
if(n > 0) then
json_ret = json_ret..',\n'
end
json_ret = json_ret..'\t[ '..value[0] .. ', '.. value[elemId] .. ' ]'
--json_ret = json_ret..'\t{ "x": '.. value[0] .. ', "y": '.. value[elemId] .. '}'
n = n + 1
end
json_ret = json_ret.."\n] }\n"
end
-- disabled: optional Average / 95th-percentile overlay series
if(false) then
json_ret = json_ret..",\n"
num_entries = num_entries + 1
json_ret = json_ret..'\n{"key": "Average",\n'
json_ret = json_ret..'"color": "'.. colors[num_entries] ..'",\n'
json_ret = json_ret..'"type": "line",\n'
json_ret = json_ret..'"values": [\n'
n = 0
for key, value in pairs(series) do
if(n > 0) then
json_ret = json_ret..',\n'
end
--json_ret = json_ret..'\t[ '..value[0] .. ', '.. value[elemId] .. ' ]'
json_ret = json_ret..'\t{ "x": '.. value[0] .. ', "y": '.. average .. '}'
n = n + 1
end
json_ret = json_ret..'\n] },\n'
num_entries = num_entries + 1
json_ret = json_ret..'\n{"key": "95th Percentile",\n'
json_ret = json_ret..'"color": "'.. colors[num_entries] ..'",\n'
json_ret = json_ret..'"type": "line",\n'
json_ret = json_ret..'"yAxis": 1,\n'
json_ret = json_ret..'"values": [\n'
n = 0
for key, value in pairs(series) do
if(n > 0) then
json_ret = json_ret..',\n'
end
--json_ret = json_ret..'\t[ '..value[0] .. ', '.. value[elemId] .. ' ]'
json_ret = json_ret..'\t{ "x": '.. value[0] .. ', "y": '.. percentile .. '}'
n = n + 1
end
json_ret = json_ret..'\n] }\n'
end
end
end
-- aggregate statistics returned alongside the serialized series
local ret = {}
ret.maxval_time = maxval_time
ret.maxval = round(maxval, 0)
ret.minval_time = minval_time
ret.minval = round(minval, 0)
ret.lastval_time = lastval_time
ret.lastval = round(lastval, 0)
ret.totalval = round(totalval, 0)
ret.percentile = round(percentile, 0)
ret.average = round(average, 0)
ret.json = json_ret
return(ret)
end
-- #################################################
-- Reads one or more RRDs (a comma-separated list in rrdFile, or "all"
-- for the top-10 rrds by traffic) and returns the result of
-- singlerrd2json() for the first entry, with .json replaced by the
-- concatenation of all the individual series JSONs.
-- When expand_interface_views is set and the interface is a view, the
-- rrds of each underlying physical interface are read separately.
function rrd2json(ifid, host, rrdFile, start_time, end_time, rickshaw_json, expand_interface_views)
local ret = {}
local num = 0
local debug_metric = false
interface.select(getInterfaceName(ifid))
local ifstats = interface.getStats()
local rrd_if_ids = {} -- read rrds for interfaces listed here
rrd_if_ids[1] = ifid -- the default submitted interface
-- interface.select(getInterfaceName(ifid))
if(debug_metric) then
io.write('ifid: '..ifid..' ifname:'..getInterfaceName(ifid)..'\n')
io.write('expand_interface_views: '..tostring(expand_interface_views)..'\n')
io.write('ifstats.isView: '..tostring(ifstats.isView)..'\n')
end
if expand_interface_views and ifstats.isView then
-- expand rrds for views and read each physical interface separately
for iface,_ in pairs(ifstats.interfaces) do
if(debug_metric) then io.write('iface: '..iface..' id: '..getInterfaceId(iface)..'\n') end
rrd_if_ids[#rrd_if_ids+1] = getInterfaceId(iface)
end
end
if(debug_metric) then io.write("RRD File: "..rrdFile.."\n") end
if(rrdFile == "all") then
-- disable expand interface views for rrdFile == all
expand_interface_views=false
local dirs = ntop.getDirs()
local p = dirs.workingdir .. "/" .. ifid .. "/rrd/"
if(debug_metric) then io.write("Navigating: "..p.."\n") end
if(host ~= nil) then
p = p .. getPathFromKey(host)
go_deep = true
else
go_deep = false
end
d = fixPath(p)
rrds = navigatedir("", "*", d, d, go_deep, false, ifid, host, start_time, end_time)
-- rank the rrds by their total traffic over the interval
local traffic_array = {}
for key, value in pairs(rrds) do
rsp = singlerrd2json(ifid, host, value, start_time, end_time, rickshaw_json)
if(rsp.totalval ~= nil) then total = rsp.totalval else total = 0 end
if(total > 0) then
-- NOTE(review): keyed by total, so two rrds with the same total
-- overwrite each other — confirm acceptable.
traffic_array[total] = rsp
if(debug_metric) then io.write("Analyzing: "..value.." [total "..total.."]\n") end
end
end
-- keep at most the top 10 entries, sorted by decreasing total
for key, value in pairsByKeys(traffic_array, rev) do
ret[#ret+1] = value
if(ret[#ret].json ~= nil) then
if(debug_metric) then io.write(key.."\n") end
num = num + 1
if(num >= 10) then break end
end
end
else
num = 0
-- read each requested rrd for each selected interface
for _,iface in pairs(rrd_if_ids) do
if(debug_metric) then io.write('iface: '..iface..'\n') end
for i,rrd in pairs(split(rrdFile, ",")) do
if(debug_metric) then io.write("["..i.."] "..rrd..' iface: '..iface.."\n") end
ret[#ret + 1] = singlerrd2json(iface, host, rrd, start_time, end_time, rickshaw_json)
if(ret[#ret].json ~= nil) then num = num + 1 end
end
end
end
if(debug_metric) then io.write("#rrds="..num.."\n") end
if(num == 0) then
ret = {}
ret.json = "[]"
return(ret)
end
local i = 1
-- if we are expanding an interface view, we want to concatenate
-- jsons for single interfaces, and not for the view. Since view statistics
-- are in ret[1], it suffices to aggregate jsons from index i >= 2
if expand_interface_views and ifstats.isView then
i = 2
end
local json = "["
local first = true -- used to decide where to append commas
while i <= num do
if(debug_metric) then io.write("->"..i.."\n") end
if not first then json = json.."," end
json = json..ret[i].json
i = i + 1
first = false
end
json = json.."]"
-- the (possibly aggregated) json always goes into ret[1]
-- ret[1] possibly contains aggregated view statistics such as
-- maxval and maxval_time or minval and minval_time
ret[1].json = json
-- io.write(json.."\n")
return(ret[1])
end