-- -- (C) 2013-17 - ntop.org -- require "lua_utils" require "db_utils" require "historical_utils" local host_pools_utils = require "host_pools_utils" local top_rrds = { {rrd="num_flows.rrd", label=i18n("graphs.active_flows")}, {rrd="num_hosts.rrd", label=i18n("graphs.active_hosts")}, {rrd="num_devices.rrd", label=i18n("graphs.active_devices")}, {rrd="num_http_hosts.rrd", label=i18n("graphs.active_http_servers")}, {rrd="bytes.rrd", label=i18n("traffic")}, {rrd="broadcast_bytes.rrd", label=i18n("broadcast_traffic")}, {rrd="packets.rrd", label=i18n("packets")}, {rrd="drops.rrd", label=i18n("graphs.packet_drops")}, {rrd="num_zmq_received_flows.rrd", label=i18n("graphs.zmq_received_flows")}, {separator=1}, {rrd="tcp_lost.rrd", label=i18n("graphs.tcp_packets_lost")}, {rrd="tcp_ooo.rrd", label=i18n("graphs.tcp_packets_ooo")}, {rrd="tcp_retransmissions.rrd", label=i18n("graphs.tcp_packets_retr")}, {rrd="tcp_retr_ooo_lost.rrd", label=i18n("graphs.tcp_retr_ooo_lost")}, {separator=1}, {rrd="tcp_syn.rrd", label=i18n("graphs.tcp_syn_packets")}, {rrd="tcp_synack.rrd", label=i18n("graphs.tcp_synack_packets")}, {rrd="tcp_finack.rrd", label=i18n("graphs.tcp_finack_packets")}, {rrd="tcp_rst.rrd", label=i18n("graphs.tcp_rst_packets")}, } -- ######################################################## if(ntop.isPro()) then package.path = dirs.installdir .. "/pro/scripts/lua/modules/?.lua;" .. package.path require "nv_graph_utils" end -- ######################################################## function getProtoVolume(ifName, start_time, end_time) ifId = getInterfaceId(ifName) path = fixPath(dirs.workingdir .. "/" .. ifId .. 
"/rrd/") rrds = ntop.readdir(path) ret = { } for rrdFile,v in pairs(rrds) do if((string.ends(rrdFile, ".rrd")) and (not isTopRRD(rrdFile))) then rrdname = getRRDName(ifId, nil, rrdFile) if(ntop.notEmptyFile(rrdname)) then local fstart, fstep, fnames, fdata = ntop.rrd_fetch(rrdname, 'AVERAGE', start_time, end_time) if(fstart ~= nil) then local num_points_found = table.getn(fdata) accumulated = 0 for i, v in ipairs(fdata) do for _, w in ipairs(v) do if(w ~= w) then -- This is a NaN v = 0 else --io.write(w.."\n") v = tonumber(w) if(v < 0) then v = 0 end end end accumulated = accumulated + v end if(accumulated > 0) then rrdFile = string.sub(rrdFile, 1, string.len(rrdFile)-4) ret[rrdFile] = accumulated end end end end end return(ret) end -- ######################################################## function navigatedir(url, label, base, path, go_deep, print_html, ifid, host, start_time, end_time, filter) local shown = false local to_skip = false local ret = { } local do_debug = false local printed = false -- io.write(debug.traceback().."\n") local rrds = ntop.readdir(path) for k,v in pairsByKeys(rrds, asc) do if(v ~= nil) then local p = fixPath(path .. "/" .. v) if(ntop.isdir(p)) then if(go_deep) then r = navigatedir(url, label.."/"..v, base, p, go_deep, print_html, ifid, host, start_time, end_time, filter) for k,v in pairs(r) do ret[k] = v if(do_debug) then print(v.."
\n") end end end else local last_update,_ = ntop.rrd_lastupdate(getRRDName(ifid, host, k)) if last_update ~= nil and last_update >= start_time then -- only show if there has been an update within the specified time frame if not isTopRRD(v) and (not filter or filter[k:gsub('.rrd','')]) then if(label == "*") then to_skip = true else if(not(shown) and not(to_skip)) then if(print_html) then if(not(printed)) then print('
  • \n') printed = true end print('\n') end end return(ret) end -- ######################################################## function breakdownBar(sent, sentLabel, rcvd, rcvdLabel, thresholdLow, thresholdHigh) if((sent+rcvd) > 0) then sent2rcvd = round((sent * 100) / (sent+rcvd), 0) -- io.write("****>> "..sent.."/"..rcvd.."/"..sent2rcvd.."\n") if((thresholdLow == nil) or (thresholdLow < 0)) then thresholdLow = 0 end if((thresholdHigh == nil) or (thresholdHigh > 100)) then thresholdHigh = 100 end if(sent2rcvd < thresholdLow) then sentLabel = ' '..sentLabel elseif(sent2rcvd > thresholdHigh) then rcvdLabel = ' '..rcvdLabel end print('
    '..sentLabel) print('
    ' .. rcvdLabel .. '
    ') else print(' ') end end -- ######################################################## function percentageBar(total, value, valueLabel) -- io.write("****>> "..total.."/"..value.."\n") if((total ~= nil) and (total > 0)) then pctg = round((value * 100) / total, 0) print('
    '..valueLabel) print('
    ') else print(' ') end end -- ######################################################## -- host_or_network: host or network name. -- If network, must be prefixed with 'net:' -- If profile, must be prefixed with 'profile:' -- If host pool, must be prefixed with 'pool:' -- If vlan, must be prefixed with 'vlan:' -- If asn, must be prefixed with 'asn:' function getRRDName(ifid, host_or_network, rrdFile) if host_or_network ~= nil and string.starts(host_or_network, 'net:') then host_or_network = string.gsub(host_or_network, 'net:', '') rrdname = fixPath(dirs.workingdir .. "/" .. ifid .. "/subnetstats/") elseif host_or_network ~= nil and string.starts(host_or_network, 'profile:') then host_or_network = string.gsub(host_or_network, 'profile:', '') rrdname = fixPath(dirs.workingdir .. "/" .. ifid .. "/profilestats/") elseif host_or_network ~= nil and string.starts(host_or_network, 'vlan:') then host_or_network = string.gsub(host_or_network, 'vlan:', '') rrdname = fixPath(dirs.workingdir .. "/" .. ifid .. "/vlanstats/") elseif host_or_network ~= nil and string.starts(host_or_network, 'pool:') then host_or_network = string.gsub(host_or_network, 'pool:', '') rrdname = host_pools_utils.getRRDBase(ifid, "") elseif host_or_network ~= nil and string.starts(host_or_network, 'snmp:') then host_or_network = string.gsub(host_or_network, 'snmp:', '') -- snmpstats are ntopng-wide so ifid is ignored rrdname = fixPath(dirs.workingdir .. "/snmpstats/") elseif host_or_network ~= nil and string.starts(host_or_network, 'flow_device:') then host_or_network = string.gsub(host_or_network, 'flow_device:', '') rrdname = fixPath(dirs.workingdir .. "/" .. ifid .. "/flow_devices/") elseif host_or_network ~= nil and string.starts(host_or_network, 'sflow:') then host_or_network = string.gsub(host_or_network, 'sflow:', '') rrdname = fixPath(dirs.workingdir .. "/" .. ifid .. 
"/sflow/") elseif host_or_network ~= nil and string.starts(host_or_network, 'vlan:') then host_or_network = string.gsub(host_or_network, 'vlan:', '') rrdname = fixPath(dirs.workingdir .. "/" .. ifid .. "/vlanstats/") elseif host_or_network ~= nil and string.starts(host_or_network, 'asn:') then host_or_network = string.gsub(host_or_network, 'asn:', '') rrdname = fixPath(dirs.workingdir .. "/" .. ifid .. "/asnstats/") else rrdname = fixPath(dirs.workingdir .. "/" .. ifid .. "/rrd/") end if(host_or_network ~= nil) then rrdname = rrdname .. getPathFromKey(host_or_network) .. "/" end return fixPath(rrdname..(rrdFile or '')) end -- ######################################################## -- label, relative_difference, seconds, graph_tick_step zoom_vals = { { "1m", "now-60s", 60, (60)/12 }, { "5m", "now-300s", 60*5, (60*5)/10 }, { "10m", "now-600s", 60*10, (60*10)/10 }, { "1h", "now-1h", 60*60*1, (60*60*1)/12 }, { "3h", "now-3h", 60*60*3, (60*60*3)/12 }, { "6h", "now-6h", 60*60*6, (60*60*6)/12 }, { "12h", "now-12h", 60*60*12, (60*60*12)/12 }, { "1d", "now-1d", 60*60*24, (60*60*24)/12 }, { "1w", "now-1w", 60*60*24*7, (60*60*24*7)/7 }, { "2w", "now-2w", 60*60*24*14, (60*60*24*14)/14 }, { "1M", "now-1mon", 60*60*24*31, (60*60*24*31)/15 }, { "6M", "now-6mon", 60*60*24*31*6, (60*60*24*31*6)/18 }, { "1Y", "now-1y", 60*60*24*366, (60*60*24*366)/12 } } function getZoomAtPos(cur_zoom, pos_offset) local pos = 1 local new_zoom_level = cur_zoom for k,v in pairs(zoom_vals) do if(zoom_vals[k][1] == cur_zoom) then if (pos+pos_offset >= 1 and pos+pos_offset < 13) then new_zoom_level = zoom_vals[pos+pos_offset][1] break end end pos = pos + 1 end return new_zoom_level end -- ######################################################## function getZoomDuration(cur_zoom) for k,v in pairs(zoom_vals) do if(zoom_vals[k][1] == cur_zoom) then return(zoom_vals[k][3]) end end return(180) end -- ######################################################## function zoomLevel2sec(zoomLevel) if(zoomLevel == 
nil) then zoomLevel = "1h" end for k,v in ipairs(zoom_vals) do if(zoom_vals[k][1] == zoomLevel) then return(zoom_vals[k][3]) end end return(3600) -- NOT REACHED end -- ######################################################## function getZoomTicksInterval(cur_zoom) for k,v in pairs(zoom_vals) do if(zoom_vals[k][1] == cur_zoom) then return(zoom_vals[k][4]) end end return(12) end -- ######################################################## function getZoomTicksJsArray(start_time, end_time, zoom) local parts = {} local step = getZoomTicksInterval(zoom) for t=start_time,end_time,step do parts[#parts+1] = t end return "[" .. table.concat(parts, ', ') .. "]" end -- ######################################################## function drawPeity(ifid, host, rrdFile, zoomLevel, selectedEpoch) rrdname = getRRDName(ifid, host, rrdFile) if(zoomLevel == nil) then zoomLevel = "1h" end nextZoomLevel = zoomLevel; epoch = tonumber(selectedEpoch); for k,v in ipairs(zoom_vals) do if(zoom_vals[k][1] == zoomLevel) then if(k > 1) then nextZoomLevel = zoom_vals[k-1][1] end if(epoch) then start_time = epoch - zoom_vals[k][3]/2 end_time = epoch + zoom_vals[k][3]/2 else end_time = os.time() start_time = end_time - zoom_vals[k][3]/2 end end end --print("=> Found "..rrdname.."

    \n") if(ntop.notEmptyFile(rrdname)) then --io.write("=> Found ".. start_time .. "|" .. end_time .. "

    \n") local fstart, fstep, fnames, fdata = ntop.rrd_fetch(rrdname, 'AVERAGE', start_time, end_time) if(fstart ~= nil) then local max_num_points = 512 -- This is to avoid having too many points and thus a fat graph local num_points_found = table.getn(fdata) local sample_rate = round(num_points_found / max_num_points) local num_points = 0 local step = 1 local series = {} if(sample_rate < 1) then sample_rate = 1 end -- print("=> "..num_points_found.."[".. sample_rate .."]["..fstart.."]

    ") id = 0 num = 0 total = 0 sample_rate = sample_rate-1 points = {} for i, v in ipairs(fdata) do timestamp = fstart + (i-1)*fstep num_points = num_points + 1 local elemId = 1 for _, w in ipairs(v) do if(w ~= w) then -- This is a NaN v = 0 else v = tonumber(w) if(v < 0) then v = 0 end end value = v*8 -- bps total = total + value if(id == sample_rate) then points[num] = round(value).."" num = num+1 id = 0 else id = id + 1 end elemId = elemId + 1 end end end end print(""..round(total).." ") for i=0,10 do if(i > 0) then print(",") end print(points[i]) end print("\n") end -- ######################################################## function isTopRRD(filename) for _,top in ipairs(top_rrds) do if top.rrd == filename then return true end end return false end function isLayer4RRD(filename) for _, l4 in pairs(l4_keys) do if filename:starts(l4[2]) or filename:starts(l4[1]) then return true end end return false end -- ######################################################## function printTopRRDs(ifid, host, start_time, baseurl, zoomLevel, selectedEpoch) local needs_separator = false for _,top in ipairs(top_rrds) do if top.separator then needs_separator = true else local k = top.rrd local v = top.label -- only show if there has been an update within the specified time frame local last_update,_ = ntop.rrd_lastupdate(getRRDName(ifid, host, k)) if last_update ~= nil and last_update >= start_time then if needs_separator then -- Only add the separator if there are actually some entries in the group print('

  • \n') needs_separator = false end print('
  • '.. v ..'
  • \n') end end end end -- ######################################################## function drawRRD(ifid, host, rrdFile, zoomLevel, baseurl, show_timeseries, selectedEpoch, selected_epoch_sanitized, topArray) local debug_rrd = false if(zoomLevel == nil) then zoomLevel = "1h" end if((selectedEpoch == nil) or (selectedEpoch == "")) then -- Refresh the page every minute unless: -- ** a specific epoch has been selected or -- ** the user is browsing historical top talkers and protocols print[[ ]] end if ntop.isPro() then _ifstats = interface.getStats() drawProGraph(ifid, host, rrdFile, zoomLevel, baseurl, show_timeseries, selectedEpoch, selected_epoch_sanitized, topArray) return end dirs = ntop.getDirs() rrdname = getRRDName(ifid, host, rrdFile) names = {} series = {} nextZoomLevel = zoomLevel; epoch = tonumber(selectedEpoch); for k,v in ipairs(zoom_vals) do if(zoom_vals[k][1] == zoomLevel) then if(k > 1) then nextZoomLevel = zoom_vals[k-1][1] end if(epoch ~= nil) then start_time = epoch - zoom_vals[k][3]/2 end_time = epoch + zoom_vals[k][3]/2 else end_time = os.time() start_time = end_time - zoom_vals[k][3] end end end prefixLabel = l4Label(string.gsub(rrdFile, ".rrd", "")) -- io.write(prefixLabel.."\n") if(prefixLabel == "Bytes") then prefixLabel = "Traffic" end if(ntop.notEmptyFile(rrdname)) then print [[

    ]] if(show_timeseries == 1) then print [[
    ]] end -- show_timeseries == 1 print(' Timeframe:
    \n') for k,v in ipairs(zoom_vals) do -- display 1 minute button only for networks and interface stats -- but exclude applications. Application statistics are gathered -- every 5 minutes local net_or_profile = false if host and (string.starts(host, 'net:') or string.starts(host, 'profile:') or string.starts(host, 'pool:') or string.starts(host, 'vlan:') or string.starts(host, 'asn:')) then net_or_profile = true end if zoom_vals[k][1] == '1m' and (net_or_profile or (not net_or_profile and not isTopRRD(rrdFile))) then goto continue end print('\n') ::continue:: end print [[

    NOTE: Click on the graph to zoom.

    ]] if(string.contains(rrdFile, "num_")) then formatter_fctn = "fint" else formatter_fctn = "fpackets" end rrd = rrd2json(ifid, host, rrdFile, start_time, end_time, true, false) -- the latest false means: expand_interface_views if (topArray ~= nil) then print [[ ]] print(' \n') if(string.contains(rrdFile, "num_") or string.contains(rrdFile, "tcp_") or string.contains(rrdFile, "packets") or string.contains(rrdFile, "drops")) then print(' \n') print(' \n') print(' \n') print(' \n') print(' \n') else formatter_fctn = "fbits" print(' \n') print(' \n') print(' \n') print(' \n') print(' \n') end print(' \n') print(' \n') print [[
     TimeValue
    Min' .. os.date("%x %X", rrd.minval_time) .. '' .. formatValue(rrd.minval) .. '
    Max' .. os.date("%x %X", rrd.maxval_time) .. '' .. formatValue(rrd.maxval) .. '
    Last' .. os.date("%x %X", rrd.lastval_time) .. '' .. formatValue(round(rrd.lastval), 1) .. '
    Average' .. formatValue(round(rrd.average, 2)) .. '
    Total Number' .. formatValue(round(rrd.totalval)) .. '
    Min' .. os.date("%x %X", rrd.minval_time) .. '' .. bitsToSize(rrd.minval) .. '
    Max' .. os.date("%x %X", rrd.maxval_time) .. '' .. bitsToSize(rrd.maxval) .. '
    Last' .. os.date("%x %X", rrd.lastval_time) .. '' .. bitsToSize(rrd.lastval) .. '
    Average' .. bitsToSize(rrd.average*8) .. '
    Total Traffic' .. bytesToSize(rrd.totalval) .. '
    Selection Time
    Minute
    Interface
    Top Talkers
    ]] end -- topArray ~= nil print[[
    ]] if ntop.getPrefs().is_dump_flows_to_mysql_enabled -- hide historical tabs for networks and profiles and pools and not string.starts(host, 'net:') and not string.starts(host, 'pool:') and not string.starts(host, 'vlan:') and not string.starts(host, 'asn:') then local k2info = hostkey2hostinfo(host) print('
    ') if tonumber(start_time) ~= nil and tonumber(end_time) ~= nil then -- if both start_time and end_time are vaid epoch we can print finer-grained top flows historicalFlowsTab(ifid, k2info["host"] or '', start_time, end_time, rrdFile, '', '', '', k2info["vlan"]) else printGraphTopFlows(ifid, k2info["host"] or '', _GET["epoch"], zoomLevel, rrdFile, k2info["vlan"]) end print('
    ') end print[[
    ]] else print("
    File "..rrdname.." cannot be found
    ") end end -- ######################################################## function create_rrd(name, step, ds) step = tonumber(step) if step == nil or step <= 1 then step = 1 end if(not(ntop.exists(name))) then if(enable_second_debug == 1) then io.write('Creating RRD ', name, '\n') end local prefs = ntop.getPrefs() ntop.rrd_create( name, step, -- step 'DS:' .. ds .. ':DERIVE:'.. step * 5 .. ':U:U', 'RRA:AVERAGE:0.5:1:'..tostring(prefs.intf_rrd_raw_days*24*(3600/step)), -- raw: 1 day = 86400 'RRA:AVERAGE:0.5:'..(60/step)..':'..tostring(prefs.intf_rrd_1min_days*24*60), -- 1 min resolution = 1 month 'RRA:AVERAGE:0.5:'..(3600/step)..':'..tostring(prefs.intf_rrd_1h_days*24), -- 1h resolution (3600 points) 2400 hours = 100 days 'RRA:AVERAGE:0.5:'..(86400)..':'..tostring(prefs.intf_rrd_1d_days) -- 1d resolution (86400 points) 365 days -- 'RRA:HWPREDICT:1440:0.1:0.0035:20' ) end end function create_rrd_num(name, ds, step) step = tonumber(step) if step == nil or step <= 1 then step = 1 end if(not(ntop.exists(name))) then if(enable_second_debug == 1) then io.write('Creating RRD ', name, '\n') end local prefs = ntop.getPrefs() ntop.rrd_create( name, step, -- step 'DS:' .. ds .. ':GAUGE:' .. step * 5 .. ':0:U', 'RRA:AVERAGE:0.5:1:'..tostring(prefs.intf_rrd_raw_days*24*(3600/step)), -- raw: 1 day = 86400 'RRA:AVERAGE:0.5:'..(3600/step)..':'..tostring(prefs.intf_rrd_1h_days*24), -- 1h resolution (3600 points) 2400 hours = 100 days 'RRA:AVERAGE:0.5:'..(86400/step)..':'..tostring(prefs.intf_rrd_1d_days) -- 1d resolution (86400 points) 365 days -- 'RRA:HWPREDICT:1440:0.1:0.0035:20' ) end end function makeRRD(basedir, ifname, rrdname, step, value) local name = fixPath(basedir .. "/" .. rrdname .. ".rrd") if(string.contains(rrdname, "num_")) then create_rrd_num(name, rrdname, step) else create_rrd(name, step, rrdname) end ntop.rrd_update(name, "N:".. tolongint(value)) if(enable_second_debug) then io.write('Updating RRD ['.. ifname..'] '.. name .. " " .. 
value ..'\n') end end function createRRDcounter(path, step, verbose) if(not(ntop.exists(path))) then if(verbose) then print('Creating RRD ', path, '\n') end local prefs = ntop.getPrefs() local hb = step * 2 -- Default hb = 2 minutes ntop.rrd_create( path, step, -- step 'DS:sent:DERIVE:'..hb..':U:U', 'DS:rcvd:DERIVE:'..hb..':U:U', 'RRA:AVERAGE:0.5:1:'..tostring(prefs.other_rrd_raw_days*24*(3600/step)), -- raw: 1 day = 1 * 24 = 24 * 12 = 288 'RRA:AVERAGE:0.5:'..(3600/step)..':'..tostring(prefs.other_rrd_1h_days*24), -- 1h resolution (12 points) 2400 hours = 100 days 'RRA:AVERAGE:0.5:'..(86400/step)..':'..tostring(prefs.other_rrd_1d_days) -- 1d resolution (288 points) 365 days --'RRA:HWPREDICT:1440:0.1:0.0035:20' ) end end -- ######################################################## function createSingleRRDcounter(path, step, verbose) if(not(ntop.exists(path))) then if(verbose) then print('Creating RRD ', path, '\n') end local prefs = ntop.getPrefs() local hb = step * 2 -- Default hb = 2 minutes ntop.rrd_create( path, step, -- step 'DS:num:DERIVE:'..hb..':U:U', 'RRA:AVERAGE:0.5:1:'..tostring(prefs.other_rrd_raw_days*24*(3600/step)), -- raw: 1 day = 1 * 24 = 24 * 12 = 288 'RRA:AVERAGE:0.5:'..(3600/step)..':'..tostring(prefs.other_rrd_1h_days*24), -- 1h resolution (12 points) 2400 hours = 100 days 'RRA:AVERAGE:0.5:'..(86400/step)..':'..tostring(prefs.other_rrd_1d_days) -- 1d resolution (288 points) 365 days -- 'RRA:HWPREDICT:1440:0.1:0.0035:20' ) end end -- ######################################################## -- this method will be very likely used when saving subnet rrd traffic statistics function createTripleRRDcounter(path, step, verbose) if(not(ntop.exists(path))) then if(verbose) then io.write('Creating RRD '..path..'\n') end local prefs = ntop.getPrefs() local hb = step * 2 -- Default hb = 2 minutes ntop.rrd_create( path, step, -- step 'DS:ingress:DERIVE:'..hb..':U:U', 'DS:egress:DERIVE:'..hb..':U:U', 'DS:inner:DERIVE:'..hb..':U:U', 
'RRA:AVERAGE:0.5:1:'..tostring(prefs.other_rrd_raw_days*24*(3600/step)), -- raw: 1 day = 1 * 24 = 24 * 12 = 288 'RRA:AVERAGE:0.5:12:'..tostring(prefs.other_rrd_1h_days*24), -- 1h resolution (12 points) 2400 hours = 100 days 'RRA:AVERAGE:0.5:288:'..tostring(prefs.other_rrd_1d_days) -- 1d resolution (288 points) 365 days --'RRA:HWPREDICT:1440:0.1:0.0035:20' ) end end function printGraphTopFlows(ifId, host, epoch, zoomLevel, l7proto, vlan) -- Check if the DB is enabled rsp = interface.execSQLQuery("show tables") if(rsp == nil) then return end if((epoch == nil) or (epoch == "")) then epoch = os.time() end local d = getZoomDuration(zoomLevel) epoch_end = epoch epoch_begin = epoch-d historicalFlowsTab(ifId, host, epoch_begin, epoch_end, l7proto, '', '', '', vlan) end -- ######################################################## -- Make sure we do not fetch data from RRDs that have been update too much long ago -- as this creates issues with the consolidation functions when we want to compare -- results coming from different RRDs. -- This is also needed to make sure that multiple data series on graphs have the -- same number of points, otherwise d3js will generate errors. function touchRRD(rrdname) local now = os.time() local last, ds_count = ntop.rrd_lastupdate(rrdname) if((last ~= nil) and ((now-last) > 3600)) then local tdiff = now - 1800 -- This avoids to set the update continuously local label = tdiff for i=1,ds_count do label = label .. 
":0" end ntop.rrd_update(rrdname, label) end end -- ######################################################## -- reads one or more RRDs and returns a json suitable to feed rickshaw function singlerrd2json(ifid, host, rrdFile, start_time, end_time, rickshaw_json, append_ifname_to_labels, transform_columns_function) local rrdname = getRRDName(ifid, host, rrdFile) local names = {} local names_cache = {} local series = {} local prefixLabel = l4Label(string.gsub(rrdFile, ".rrd", "")) -- with a scaling factor we can stretch or shrink rrd values -- by default we set this to a value of 8, in order to convert bytes -- rrds into bits. local scaling_factor = 8 touchRRD(rrdname) --io.write(prefixLabel.."\n") if(prefixLabel == "Bytes") then prefixLabel = "Traffic" elseif string.starts(rrdFile, "categories/") then prefixLabel = prefixLabel.." Traffic" end if(string.contains(rrdFile, "num_") or string.contains(rrdFile, "tcp_") or string.contains(rrdFile, "packets") or string.contains(rrdFile, "drops")) then -- do not scale number, packets, and drops scaling_factor = 1 end if(not ntop.notEmptyFile(rrdname)) then return '{}' end local fstart, fstep, fnames, fdata = ntop.rrd_fetch(rrdname, 'AVERAGE', start_time, end_time) if(fstart == nil) then return '{}' end if transform_columns_function ~= nil then --~ tprint(rrdname) fstart, fstep, fnames, fdata, prefixLabel = transform_columns_function(fstart, fstep, fnames, fdata) prefixLabel = prefixLabel or "" end --[[ io.write('start time: '..start_time..' end_time: '..end_time..'\n') io.write('fstart: '..fstart..' fstep: '..fstep..' 
rrdname: '..rrdname..'\n') io.write('len(fdata): '..table.getn(fdata)..'\n') --]] local max_num_points = 600 -- This is to avoid having too many points and thus a fat graph if tonumber(global_max_num_points) ~= nil then max_num_points = global_max_num_points end local num_points_found = table.getn(fdata) local sample_rate = round(num_points_found / max_num_points) local port_mode = false if(sample_rate < 1) then sample_rate = 1 end -- Pretty printing for flowdevs/a.b.c.d/e.rrd local elems = split(prefixLabel, "/") if((elems[#elems] ~= nil) and (#elems > 1)) then prefixLabel = capitalize(elems[#elems] or "") port_mode = true end -- prepare rrd labels local protocol_categories = interface.getnDPICategories() for i, n in ipairs(fnames) do -- handle duplicates if (names_cache[n] == nil) then local extra_info = '' names_cache[n] = true if append_ifname_to_labels then extra_info = getInterfaceName(ifid) end if host ~= nil and not string.starts(host, 'profile:') and protocol_categories[prefixLabel] == nil and not string.starts(rrdFile, 'categories/') then extra_info = extra_info..firstToUpper(n) end if string.starts(host, 'asn:') then extra_info = extra_info.." by AS" end if extra_info ~= "" and extra_info ~= prefixLabel then if(port_mode) then if(#names == 0) then names[#names+1] = prefixLabel.." Egress ("..extra_info..") " else names[#names+1] = prefixLabel.." Ingress ("..extra_info..") " end elseif prefixLabel ~= "" then names[#names+1] = prefixLabel.." 
("..extra_info..") " else names[#names+1] = extra_info end else names[#names+1] = prefixLabel end end end local minval, maxval, lastval = 0, 0, 0 local maxval_time, minval_time, lastval_time = nil, nil, nil local sampling = 1 local s = {} local totalval, avgval = {}, {} for i, v in ipairs(fdata) do local instant = fstart + (i-1)*fstep -- this is the instant in time corresponding to the datapoint s[0] = instant -- s holds the instant and all the values totalval[instant] = 0 -- totalval holds the sum of all values of this instant avgval[instant] = 0 local elemId = 1 for _, w in ipairs(v) do if(w ~= w) then -- This is a NaN w = 0 else --io.write(w.."\n") w = tonumber(w) if(w < 0) then w = 0 end end -- update the total value counter, which is the non-scaled integral over time totalval[instant] = totalval[instant] + w * fstep -- also update the average val (do not multiply by fstep, this is not the integral) avgval[instant] = avgval[instant] + w -- and the scaled current value (remember that these are derivatives) w = w * scaling_factor -- the scaled current value w goes into its own element elemId if (s[elemId] == nil) then s[elemId] = 0 end s[elemId] = s[elemId] + w --if(s[elemId] > 0) then io.write("[".. elemId .. "]=" .. 
s[elemId] .."\n") end elemId = elemId + 1 end -- stops every sample_rate samples, or when there are no more points if(sampling == sample_rate or num_points_found == i) then local sample_sum = 0 for elemId=1,#s do -- calculate the average in the sampling period s[elemId] = s[elemId] / sampling sample_sum = sample_sum + s[elemId] end -- update last instant if lastval_time == nil or instant > lastval_time then lastval = sample_sum lastval_time = instant end -- possibly update maximum value (grab the most recent in case of a tie) if maxval_time == nil or (sample_sum >= maxval and instant > maxval_time) then maxval = sample_sum maxval_time = instant end -- possibly update the minimum value (grab the most recent in case of a tie) if minval_time == nil or (sample_sum <= minval and instant > minval_time) then minval = sample_sum minval_time = instant end series[#series+1] = s sampling = 1 s = {} else sampling = sampling + 1 end end local tot = 0 for k, v in pairs(totalval) do tot = tot + v end totalval = tot tot = 0 for k, v in pairs(avgval) do tot = tot + v end local average = tot / num_points_found local percentile = 0.95*maxval local colors = { '#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd', '#8c564b', '#e377c2', '#7f7f7f', '#bcbd22', '#17becf', -- https://github.com/mbostock/d3/wiki/Ordinal-Scales '#ff7f0e', '#ffbb78', '#1f77b4', '#aec7e8', '#2ca02c', '#98df8a', '#d62728', '#ff9896', '#9467bd', '#c5b0d5', '#8c564b', '#c49c94', '#e377c2', '#f7b6d2', '#7f7f7f', '#c7c7c7', '#bcbd22', '#dbdb8d', '#17becf', '#9edae5' } if(names ~= nil) then json_ret = '' if(rickshaw_json) then for elemId=1,#names do if(elemId > 1) then json_ret = json_ret.."\n,\n" end local name = names[elemId] json_ret = json_ret..'{"name": "'.. name .. '",\n' json_ret = json_ret..'color: \''.. colors[elemId] ..'\',\n' json_ret = json_ret..'"data": [\n' n = 0 for key, value in pairs(series) do if(n > 0) then json_ret = json_ret..',\n' end json_ret = json_ret..'\t{ "x": '.. value[0] .. ', "y": '.. 
value[elemId] .. '}' n = n + 1 end json_ret = json_ret.."\n]}\n" end else -- NV3 local num_entries = 0; for elemId=1,#names do num_entries = num_entries + 1 if(elemId > 1) then json_ret = json_ret.."\n,\n" end name = names[elemId] json_ret = json_ret..'{"key": "'.. name .. '",\n' -- json_ret = json_ret..'"color": "'.. colors[num_entries] ..'",\n' json_ret = json_ret..'"area": true,\n' json_ret = json_ret..'"values": [\n' n = 0 for key, value in pairs(series) do if(n > 0) then json_ret = json_ret..',\n' end json_ret = json_ret..'\t[ '..value[0] .. ', '.. value[elemId] .. ' ]' --json_ret = json_ret..'\t{ "x": '.. value[0] .. ', "y": '.. value[elemId] .. '}' n = n + 1 end json_ret = json_ret.."\n] }\n" end if(false) then json_ret = json_ret..",\n" num_entries = num_entries + 1 json_ret = json_ret..'\n{"key": "Average",\n' json_ret = json_ret..'"color": "'.. colors[num_entries] ..'",\n' json_ret = json_ret..'"type": "line",\n' json_ret = json_ret..'"values": [\n' n = 0 for key, value in pairs(series) do if(n > 0) then json_ret = json_ret..',\n' end --json_ret = json_ret..'\t[ '..value[0] .. ', '.. value[elemId] .. ' ]' json_ret = json_ret..'\t{ "x": '.. value[0] .. ', "y": '.. average .. '}' n = n + 1 end json_ret = json_ret..'\n] },\n' num_entries = num_entries + 1 json_ret = json_ret..'\n{"key": "95th Percentile",\n' json_ret = json_ret..'"color": "'.. colors[num_entries] ..'",\n' json_ret = json_ret..'"type": "line",\n' json_ret = json_ret..'"yAxis": 1,\n' json_ret = json_ret..'"values": [\n' n = 0 for key, value in pairs(series) do if(n > 0) then json_ret = json_ret..',\n' end --json_ret = json_ret..'\t[ '..value[0] .. ', '.. value[elemId] .. ' ]' json_ret = json_ret..'\t{ "x": '.. value[0] .. ', "y": '.. percentile .. 
'}' n = n + 1 end json_ret = json_ret..'\n] }\n' end end end local ret = {} ret.maxval_time = maxval_time ret.maxval = round(maxval, 0) ret.minval_time = minval_time ret.minval = round(minval, 0) ret.lastval_time = lastval_time ret.lastval = round(lastval, 0) ret.totalval = round(totalval, 0) ret.percentile = round(percentile, 0) ret.average = round(average, 0) ret.json = json_ret return(ret) end -- ################################################# function rrd2json_merge(ret, num) -- if we are expanding an interface view, we want to concatenate -- jsons for single interfaces, and not for the view. Since view statistics -- are in ret[1], it suffices to aggregate jsons from index i >= 2 local json = "[" local first = true -- used to decide where to append commas -- sort by "totalval" to get the top "num" results local by_totalval = {} local totalval = 0 local minval = 0 for i = 1, #ret do by_totalval[i] = ret[i].totalval -- update total totalval = totalval + ret[i].totalval end local ctr = 0 for i,_ in pairsByValues(by_totalval, rev) do if ctr >= num then break end if(debug_metric) then io.write("->"..i.."\n") end if not first then json = json.."," end json = json..ret[i].json first = false ctr = ctr + 1 end json = json.."]" -- the (possibly aggregated) json always goes into ret[1] -- ret[1] possibly contains aggregated view statistics such as -- maxval and maxval_time or minval and minval_time ret[1].json = json if #ret > 1 then -- update the total with the sum of the totals of each timeseries ret[1].totalval = totalval -- remove metrics that are no longer valid for merged rrds for _, k in pairs({'average', 'minval', 'minval_time', 'maxval', 'maxval_time', 'lastval', 'lastval_time', 'percentile'}) do ret[1][k] = nil end end -- io.write(json.."\n") return(ret[1]) end function rrd2json(ifid, host, rrdFile, start_time, end_time, rickshaw_json, expand_interface_views) local ret = {} local num = 0 local debug_metric = false interface.select(getInterfaceName(ifid)) local 
ifstats = interface.getStats()
local rrd_if_ids = {} -- read rrds for interfaces listed here
rrd_if_ids[1] = ifid -- the default submitted interface
-- interface.select(getInterfaceName(ifid))

if(debug_metric) then
   io.write('ifid: '..ifid..' ifname:'..getInterfaceName(ifid)..'\n')
   io.write('expand_interface_views: '..tostring(expand_interface_views)..'\n')
end

if(debug_metric) then io.write("RRD File: "..rrdFile.."\n") end

-- the following code is used to compute stacked charts of top protocols and applications
if(rrdFile == "all" or rrdFile == "all_ndpi_categories") then -- all means all l-7 applications
   -- disable expand interface views for rrdFile == all
   local expand_interface_views = false
   local dirs = ntop.getDirs()
   local d = getRRDName(ifid, host)

   --[[ NOTE(review): 'p' is not defined in this function; this concatenation
        would raise if debug_metric were true -- probably meant 'd' ]]
   if(debug_metric) then io.write("Navigating: "..p.."\n") end

   local go_deep = true
   local ndpi_protocols = interface.getnDPIProtocols()
   local ndpi_categories = interface.getnDPICategories()
   local filter = ndpi_protocols
   if rrdFile == "all_ndpi_categories" then filter = ndpi_categories end

   local rrds = navigatedir("", "*", d, d, go_deep, false, ifid, host, start_time, end_time, filter)
   local traffic_array = {}

   for key, value in pairs(rrds) do
      local rsp = singlerrd2json(ifid, host, value, start_time, end_time, rickshaw_json, expand_interface_views)

      -- NOTE(review): 'total' is assigned without 'local' and leaks globally
      if(rsp.totalval ~= nil) then total = rsp.totalval else total = 0 end

      if(total > 0) then
         --[[ NOTE(review): keyed by the numeric total, so two series with an
              identical total overwrite each other -- confirm acceptable ]]
         traffic_array[total] = rsp
         if(debug_metric) then io.write("Analyzing: "..value.." [total "..total.."]\n") end
      end
      ::continue:: --[[ NOTE(review): label without a matching visible goto; requires Lua 5.2+/LuaJIT ]]
   end

   for key, value in pairsByKeys(traffic_array, rev) do
      ret[#ret+1] = value
      if(ret[#ret].json ~= nil) then
         if(debug_metric) then io.write(key.."\n") end
         num = num + 1
         if(num >= 10) then break end -- keep at most the top-10 series
      end
   end
else
   num = 0
   for _,iface in pairs(rrd_if_ids) do
      if(debug_metric) then io.write('iface: '..iface..'\n') end

      for i,rrd in pairs(split(rrdFile, ",")) do
         if(debug_metric) then io.write("["..i.."] "..rrd..' iface: '..iface.."\n") end
         ret[#ret + 1] = singlerrd2json(iface, host, rrd, start_time, end_time, rickshaw_json, expand_interface_views)
         if(ret[#ret].json ~= nil) then num = num + 1 end
      end
   end
end

if(debug_metric) then io.write("#rrds="..num.."\n") end

if(num == 0) then
   -- no timeseries produced any data: return an empty payload
   ret = {}
   ret.json = "[]"
   return(ret)
end

return rrd2json_merge(ret, num)
end

-- #################################################

--[[ Print the per-host activity data read from the host's activity RRDs, for
     the time window implied by zoomLevel/selectedEpoch.
     NOTE(review): relies on globals 'zoom_vals' and 'formatEpoch' defined
     elsewhere in the project -- confirm they are in scope. ]]
function showHostActivityStats(hostbase, selectedEpoch, zoomLevel)
   local activbase = hostbase .. "/activity"
   local nextZoomLevel = zoomLevel;
   local start_time, end_time

   if ntop.isdir(activbase) then
      local epoch = tonumber(selectedEpoch)

      -- TODO separate function and join drawPeity
      -- derive [start_time, end_time] (and the next zoom step) from zoom_vals
      for k,v in ipairs(zoom_vals) do
         if(zoom_vals[k][1] == zoomLevel) then
            if(k > 1) then nextZoomLevel = zoom_vals[k-1][1] end
            if(epoch) then
               start_time = epoch - zoom_vals[k][3]/2
               end_time = epoch + zoom_vals[k][3]/2
            else
               end_time = os.time()
               start_time = end_time - zoom_vals[k][3]/2
            end
         end
      end

      for key,value in pairs(ntop.readdir(activbase)) do
         local activrrd = activbase .. "/" .. key;

         if(ntop.notEmptyFile(activrrd)) then
            local fstart, fstep, fnames, fdata = ntop.rrd_fetch(activrrd, 'AVERAGE', start_time, end_time)
            local num_points = table.getn(fdata) --[[ NOTE(review): table.getn is deprecated; #fdata in Lua 5.1+ ]]
            --[[ NOTE(review): 'start' below is not defined in this function
                 (resolves to a global) -- almost certainly meant 'fstart'.
                 Also the string literal is cut at the chunk boundary: its
                 markup was lost in extraction; restore from version control ]]
            print(value.."["..num_points.." points] start="..formatEpoch(start)..", step="..fstep.."s
    ")
    for i, v in ipairs(fdata) do
       for _, w in ipairs(v) do
	  if(w ~= w) then
	     -- This is a NaN
	     v = 0
	  else
	     --io.write(w.."\n")
	     v = tonumber(w)
	     if(v < 0) then v = 0 end
	  end
       end
       --[[ NOTE(review): 'v' (the outer loop value) is clobbered by the inner
            loop above, so only the last data-source's value of each point is
            printed -- confirm this is the intent ]]
       print(round(v, 2).." ")
       print("
    ") end end end end

-- #################################################

--
-- proto table should contain the following information:
--    string traffic_quota
--    string time_quota
--    string protoName
--
-- ndpi_stats or category_stats can be nil if they are not relevant for the proto
--
-- quotas_to_show can contain:
--    bool traffic
--    bool time
--
--[[ Renders the traffic and/or time quota bars for a protocol (ndpi_stats)
     or a category (category_stats) and returns the concatenated markup.
     A quota string of "0" means unlimited. ]]
function printProtocolQuota(proto, ndpi_stats, category_stats, quotas_to_show, show_td, hide_limit)
   local total_bytes = 0
   local total_duration = 0
   local output = {}

   if ndpi_stats ~= nil then
      -- This is a single protocol
      local proto_stats = ndpi_stats[proto.protoName]
      if proto_stats ~= nil then
         total_bytes = proto_stats["bytes.sent"] + proto_stats["bytes.rcvd"]
         total_duration = proto_stats["duration"]
      end
   else
      -- This is a category
      local cat_stats = category_stats[proto.protoName]
      if cat_stats ~= nil then
         total_bytes = cat_stats["bytes"]
         total_duration = cat_stats["duration"]
      end
   end

   if quotas_to_show.traffic then
      local bytes_exceeded = ((proto.traffic_quota ~= "0") and (total_bytes >= tonumber(proto.traffic_quota)))
      local lb_bytes = bytesToSize(total_bytes)
      local lb_bytes_quota = ternary(proto.traffic_quota ~= "0", bytesToSize(tonumber(proto.traffic_quota)), i18n("unlimited"))
      local traffic_taken = ternary(proto.traffic_quota ~= "0", math.min(total_bytes, proto.traffic_quota), 0)
      local traffic_remaining = math.max(proto.traffic_quota - traffic_taken, 0)
      local traffic_quota_ratio = round(traffic_taken * 100 / (traffic_taken+traffic_remaining), 0)
      --[[ NOTE(review): the markup inside the bracketed strings below was
           lost in extraction (the text is not well-formed as-is); restore it
           from version control before changing this section ]]
      if show_td then output[#output + 1] = [["..lb_bytes..ternary(hide_limit, "", " / "..lb_bytes_quota).."" end output[#output + 1] = [[
    '.. ternary(traffic_quota_ratio == traffic_quota_ratio --[[nan check]], traffic_quota_ratio, 0)..[[%
    ]] if show_td then output[#output + 1] = ("") end
   end

   if quotas_to_show.time then
      local time_exceeded = ((proto.time_quota ~= "0") and (total_duration >= tonumber(proto.time_quota)))
      local lb_duration = secondsToTime(total_duration)
      local lb_duration_quota = ternary(proto.time_quota ~= "0", secondsToTime(tonumber(proto.time_quota)), i18n("unlimited"))
      local duration_taken = ternary(proto.time_quota ~= "0", math.min(total_duration, proto.time_quota), 0)
      local duration_remaining = math.max(proto.time_quota - duration_taken, 0)
      local duration_quota_ratio = round(duration_taken * 100 / (duration_taken+duration_remaining), 0)
      --[[ NOTE(review): as in the traffic branch above, the markup inside the
           bracketed strings below was lost in extraction ]]
      if show_td then output[#output + 1] = [["..lb_duration..ternary(hide_limit, "", " / "..lb_duration_quota).."" end output[#output + 1] = ([[
    '.. ternary(duration_quota_ratio == duration_quota_ratio --[[nan check]], duration_quota_ratio, 0)..[[%
    ]]) if show_td then output[#output + 1] = ("") end
   end

   return table.concat(output, '')
end

-- #################################################

--[[ Build the option list for a host-pool selector: one entry per pool of
     the current interface, skipping pools listed in 'exclude' unless they
     match pool_id. The per-option markup was lost in extraction (the
     assigned literal below is empty as-is).
     NOTE(review): reads global 'ifId' -- set elsewhere in this file ]]
function poolDropdown(pool_id, exclude)
   local output = {}

   --exclude = exclude or {[host_pools_utils.DEFAULT_POOL_ID]=true}
   exclude = exclude or {}

   for _,pool in ipairs(host_pools_utils.getPoolsList(ifId)) do
      if (not exclude[pool.id]) or (pool.id == pool_id) then
         output[#output + 1] = ''
      end
   end

   return table.concat(output, '')
end

--[[ Print the host-pool change widget (label, selector, edit link) for the
     given pool_id. NOTE(review): the surrounding markup inside the
     long-bracket strings was lost in extraction; restore it from version
     control before editing ]]
function printPoolChangeDropdown(pool_id)
   local output = {}

   output[#output + 1] = [[ ]] .. i18n("host_config.host_pool") .. [[
     
    ]] .. i18n("host_pools.edit_host_pools") .. [[ ]]

   print(table.concat(output, ''))
end