Stops loading lists during shutdown

This commit is contained in:
Simone Mainardi 2019-10-18 14:46:02 +02:00
parent f006b7304d
commit c68b9af4af

View file

@ -1,5 +1,5 @@
--
-- (C) 2018 - ntop.org
-- (C) 2019 - ntop.org
--
local lists_utils = {}
@ -31,97 +31,97 @@ local is_nedge = ntop.isnEdge()
-- [hosts] 127.0.0.1 1.2.3.4
--
local BUILTIN_LISTS = {
["ntop IP Malware Meltdown"] = {
url = "http://blacklists.ntop.org/blacklist-ip.list",
category = CUSTOM_CATEGORY_MALWARE,
format = "ip",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["ntop Host Malware Meltdown"] = {
url = "http://blacklists.ntop.org/blacklist-hostnames.list",
category = CUSTOM_CATEGORY_MALWARE,
format = "host",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Emerging Threats"] = {
url = "https://rules.emergingthreats.net/fwrules/emerging-Block-IPs.txt",
category = CUSTOM_CATEGORY_MALWARE,
format = "ip",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Cisco Talos Intelligence"] = {
url = "https://talosintelligence.com/documents/ip-blacklist",
category = CUSTOM_CATEGORY_MALWARE,
format = "ip",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Ransomware Domain Blocklist"] = {
url = "https://ransomwaretracker.abuse.ch/downloads/RW_DOMBL.txt",
category = CUSTOM_CATEGORY_MALWARE,
format = "domain",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Ransomware IP Blocklist"] = {
url = "https://ransomwaretracker.abuse.ch/downloads/RW_IPBL.txt",
category = CUSTOM_CATEGORY_MALWARE,
format = "ip",
enabled = false, -- Medium False Positive rate
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Feodo Tracker Botnet C2 IP Blocklist"] = {
url = "https://feodotracker.abuse.ch/downloads/ipblocklist.txt",
category = CUSTOM_CATEGORY_MALWARE,
format = "ip",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["SSLBL Botnet C2 IP Blacklist"] = {
url = "https://sslbl.abuse.ch/blacklist/sslipblacklist.txt",
category = CUSTOM_CATEGORY_MALWARE,
format = "ip",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["MalwareDomainList Hosts"] = {
url = "https://www.malwaredomainlist.com/hostslist/hosts.txt",
category = CUSTOM_CATEGORY_MALWARE,
format = "hosts",
enabled = false,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Anti-WebMiner"] = {
url = "https://raw.githubusercontent.com/greatis/Anti-WebMiner/master/hosts",
category = CUSTOM_CATEGORY_MINING,
format = "hosts",
enabled = false,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["NoCoin Filter List"] = {
url = "https://raw.githubusercontent.com/hoshsadiq/adblock-nocoin-list/master/hosts.txt",
category = CUSTOM_CATEGORY_MINING,
format = "hosts",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Disconnect.me Simple Ad List"] = {
url = "https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt",
category = CUSTOM_CATEGORY_ADVERTISEMENT,
format = "domain",
enabled = is_nedge,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["hpHosts Ad and Tracking"] = {
url = "https://hosts-file.net/ad_servers.txt",
category = CUSTOM_CATEGORY_ADVERTISEMENT,
format = "hosts",
enabled = is_nedge,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["AdAway default blocklist"] = {
url = "https://adaway.org/hosts.txt",
category = CUSTOM_CATEGORY_ADVERTISEMENT,
format = "hosts",
enabled = is_nedge,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["SSLBL JA3"] = {
url = "https://sslbl.abuse.ch/blacklist/ja3_fingerprints.csv",
format = "ja3_suricata_csv",
category = CUSTOM_CATEGORY_MALWARE,
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}
["ntop IP Malware Meltdown"] = {
url = "http://blacklists.ntop.org/blacklist-ip.list",
category = CUSTOM_CATEGORY_MALWARE,
format = "ip",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["ntop Host Malware Meltdown"] = {
url = "http://blacklists.ntop.org/blacklist-hostnames.list",
category = CUSTOM_CATEGORY_MALWARE,
format = "host",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Emerging Threats"] = {
url = "https://rules.emergingthreats.net/fwrules/emerging-Block-IPs.txt",
category = CUSTOM_CATEGORY_MALWARE,
format = "ip",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Cisco Talos Intelligence"] = {
url = "https://talosintelligence.com/documents/ip-blacklist",
category = CUSTOM_CATEGORY_MALWARE,
format = "ip",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Ransomware Domain Blocklist"] = {
url = "https://ransomwaretracker.abuse.ch/downloads/RW_DOMBL.txt",
category = CUSTOM_CATEGORY_MALWARE,
format = "domain",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Ransomware IP Blocklist"] = {
url = "https://ransomwaretracker.abuse.ch/downloads/RW_IPBL.txt",
category = CUSTOM_CATEGORY_MALWARE,
format = "ip",
enabled = false, -- Medium False Positive rate
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Feodo Tracker Botnet C2 IP Blocklist"] = {
url = "https://feodotracker.abuse.ch/downloads/ipblocklist.txt",
category = CUSTOM_CATEGORY_MALWARE,
format = "ip",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["SSLBL Botnet C2 IP Blacklist"] = {
url = "https://sslbl.abuse.ch/blacklist/sslipblacklist.txt",
category = CUSTOM_CATEGORY_MALWARE,
format = "ip",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["MalwareDomainList Hosts"] = {
url = "https://www.malwaredomainlist.com/hostslist/hosts.txt",
category = CUSTOM_CATEGORY_MALWARE,
format = "hosts",
enabled = false,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Anti-WebMiner"] = {
url = "https://raw.githubusercontent.com/greatis/Anti-WebMiner/master/hosts",
category = CUSTOM_CATEGORY_MINING,
format = "hosts",
enabled = false,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["NoCoin Filter List"] = {
url = "https://raw.githubusercontent.com/hoshsadiq/adblock-nocoin-list/master/hosts.txt",
category = CUSTOM_CATEGORY_MINING,
format = "hosts",
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["Disconnect.me Simple Ad List"] = {
url = "https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt",
category = CUSTOM_CATEGORY_ADVERTISEMENT,
format = "domain",
enabled = is_nedge,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["hpHosts Ad and Tracking"] = {
url = "https://hosts-file.net/ad_servers.txt",
category = CUSTOM_CATEGORY_ADVERTISEMENT,
format = "hosts",
enabled = is_nedge,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["AdAway default blocklist"] = {
url = "https://adaway.org/hosts.txt",
category = CUSTOM_CATEGORY_ADVERTISEMENT,
format = "hosts",
enabled = is_nedge,
update_interval = DEFAULT_UPDATE_INTERVAL,
}, ["SSLBL JA3"] = {
url = "https://sslbl.abuse.ch/blacklist/ja3_fingerprints.csv",
format = "ja3_suricata_csv",
category = CUSTOM_CATEGORY_MALWARE,
enabled = true,
update_interval = DEFAULT_UPDATE_INTERVAL,
}
}
-- ##############################################
@ -134,160 +134,160 @@ local STATUS_KEY = "ntopng.prefs.category_lists.status"
local function loadListsFromRedis()
local lists_metadata = ntop.getPref(METADATA_KEY)
local lists_status = ntop.getPref(STATUS_KEY)
local lists_metadata = ntop.getPref(METADATA_KEY)
local lists_status = ntop.getPref(STATUS_KEY)
if isEmptyString(lists_status) then
return {}
end
if isEmptyString(lists_status) then
return {}
end
local status = json.decode(lists_status)
local lists = {}
local status = json.decode(lists_status)
local lists = {}
if not isEmptyString(lists_metadata) then
lists = json.decode(lists_metadata)
end
if not isEmptyString(lists_metadata) then
lists = json.decode(lists_metadata)
end
lists = table.merge(BUILTIN_LISTS, lists)
lists = table.merge(BUILTIN_LISTS, lists)
if((lists == nil) or (status == nil)) then
return {}
end
if((lists == nil) or (status == nil)) then
return {}
end
for list_name, list in pairs(lists) do
if status[list_name] then
list.status = status[list_name]
end
end
for list_name, list in pairs(lists) do
if status[list_name] then
list.status = status[list_name]
end
end
return lists
return lists
end
-- ##############################################
local function saveListsStatusToRedis(lists)
local status = {}
local status = {}
for list_name, list in pairs(lists or {}) do
status[list_name] = list.status
end
for list_name, list in pairs(lists or {}) do
status[list_name] = list.status
end
ntop.setPref(STATUS_KEY, json.encode(status))
ntop.setPref(STATUS_KEY, json.encode(status))
end
-- ##############################################
local function saveListsMetadataToRedis(lists)
local metadata = {}
local metadata = {}
for list_name, list in pairs(lists or {}) do
local meta = table.clone(list)
meta.status = nil
for list_name, list in pairs(lists or {}) do
local meta = table.clone(list)
meta.status = nil
metadata[list_name] = meta
end
metadata[list_name] = meta
end
ntop.setPref(METADATA_KEY, json.encode(metadata))
ntop.setPref(METADATA_KEY, json.encode(metadata))
end
-- ##############################################
function lists_utils.getCategoryLists()
-- TODO add support for user defined urls
local lists = {}
local redis_lists = loadListsFromRedis()
-- TODO add support for user defined urls
local lists = {}
local redis_lists = loadListsFromRedis()
local default_status = {last_update=0, num_hosts=0, last_error=false, num_errors=0}
local default_status = {last_update=0, num_hosts=0, last_error=false, num_errors=0}
for key, default_values in pairs(BUILTIN_LISTS) do
local list = table.merge(default_values, redis_lists[key] or {status = {}})
list.status = table.merge(default_status, list.status)
lists[key] = list
end
for key, default_values in pairs(BUILTIN_LISTS) do
local list = table.merge(default_values, redis_lists[key] or {status = {}})
list.status = table.merge(default_status, list.status)
lists[key] = list
end
return lists
return lists
end
-- ##############################################
function lists_utils.editList(list_name, metadata_override)
local lists = lists_utils.getCategoryLists()
local list = lists[list_name]
local lists = lists_utils.getCategoryLists()
local list = lists[list_name]
if not list then
return false
end
if not list then
return false
end
list = table.merge(list, metadata_override)
lists[list_name] = list
list = table.merge(list, metadata_override)
lists[list_name] = list
saveListsMetadataToRedis(lists)
saveListsMetadataToRedis(lists)
-- Trigger a reload, for example for disabled lists
lists_utils.downloadLists()
-- Trigger a reload, for example for disabled lists
lists_utils.downloadLists()
end
-- ##############################################
-- Force a single list reload
function lists_utils.updateList(list_name)
ntop.setCache("ntopng.cache.category_lists.update." .. list_name, "1")
lists_utils.downloadLists()
ntop.setCache("ntopng.cache.category_lists.update." .. list_name, "1")
lists_utils.downloadLists()
end
-- ##############################################
local function initListCacheDir()
ntop.mkdir(os_utils.fixPath(string.format("%s/category_lists", dirs.workingdir)))
ntop.mkdir(os_utils.fixPath(string.format("%s/category_lists", dirs.workingdir)))
end
local function getListCacheFile(list_name, downloading)
local f = string.format("%s/category_lists/%s.txt", dirs.workingdir, list_name)
local f = string.format("%s/category_lists/%s.txt", dirs.workingdir, list_name)
if downloading then
f = string.format("%s.new", f)
end
if downloading then
f = string.format("%s.new", f)
end
return os_utils.fixPath(f)
return os_utils.fixPath(f)
end
-- ##############################################
local function getNextListUpdate(list)
local interval
local interval
if(list.status.last_error and (list.status.num_errors < MAX_LIST_ERRORS)) then
-- When the download fails, retry next hour
interval = 3600
else
interval = list.update_interval
end
if(list.status.last_error and (list.status.num_errors < MAX_LIST_ERRORS)) then
-- When the download fails, retry next hour
interval = 3600
else
interval = list.update_interval
end
local next_update
local next_update
-- align if possible
if interval == 3600 then
next_update = ntop.roundTime(list.status.last_update, 3600, false)
elseif interval == 86400 then
next_update = ntop.roundTime(list.status.last_update, 86400, true --[[ UTC align ]])
else
next_update = list.status.last_update + interval
end
-- align if possible
if interval == 3600 then
next_update = ntop.roundTime(list.status.last_update, 3600, false)
elseif interval == 86400 then
next_update = ntop.roundTime(list.status.last_update, 86400, true --[[ UTC align ]])
else
next_update = list.status.last_update + interval
end
return next_update
return next_update
end
-- Returns true if the given list should be updated
function lists_utils.shouldUpdate(list_name, list, now)
local list_file = getListCacheFile(list_name, false)
local next_update = getNextListUpdate(list)
local list_file = getListCacheFile(list_name, false)
local next_update = getNextListUpdate(list)
-- note: num_errors is used to avoid retrying downloading the same list again when
-- the file does not exist
return(list.enabled and
((now >= next_update) or
(not ntop.exists(list_file) and (list.status.num_errors < MAX_LIST_ERRORS)) or
(ntop.getCache("ntopng.cache.category_lists.update." .. list_name) == "1")))
-- note: num_errors is used to avoid retrying downloading the same list again when
-- the file does not exist
return(list.enabled and
((now >= next_update) or
(not ntop.exists(list_file) and (list.status.num_errors < MAX_LIST_ERRORS)) or
(ntop.getCache("ntopng.cache.category_lists.update." .. list_name) == "1")))
end
-- ##############################################
@ -295,267 +295,273 @@ end
-- Check if the lists require an update
-- Returns true after all the lists are processed, false otherwise
local function checkListsUpdate(timeout)
local lists = lists_utils.getCategoryLists()
local begin_time = os.time()
local now = begin_time
local lists = lists_utils.getCategoryLists()
local begin_time = os.time()
local now = begin_time
initListCacheDir()
initListCacheDir()
for list_name, list in pairsByKeys(lists) do
local list_file = getListCacheFile(list_name, false)
for list_name, list in pairsByKeys(lists) do
local list_file = getListCacheFile(list_name, false)
if lists_utils.shouldUpdate(list_name, list, now) then
local temp_fname = getListCacheFile(list_name, true)
if lists_utils.shouldUpdate(list_name, list, now) then
local temp_fname = getListCacheFile(list_name, true)
traceError(TRACE_INFO, TRACE_CONSOLE, string.format("Updating list '%s'...", list_name))
local started_at = os.time()
local res = ntop.httpFetch(list.url, temp_fname, timeout)
traceError(TRACE_INFO, TRACE_CONSOLE, string.format("Updating list '%s'...", list_name))
local started_at = os.time()
local res = ntop.httpFetch(list.url, temp_fname, timeout)
if(res and (res["RESPONSE_CODE"] == 200)) then
-- download was successful, replace the original file
os.rename(temp_fname, list_file)
list.status.last_error = false
list.status.num_errors = 0
else
-- failure
local respcode = 0
local last_error = i18n("delete_data.msg_err_unknown")
if(res and (res["RESPONSE_CODE"] == 200)) then
-- download was successful, replace the original file
os.rename(temp_fname, list_file)
list.status.last_error = false
list.status.num_errors = 0
else
-- failure
local respcode = 0
local last_error = i18n("delete_data.msg_err_unknown")
if res and res["ERROR"] then
last_error = res["ERROR"]
elseif res and res["RESPONSE_CODE"] ~= nil then
respcode = ternary(res["RESPONSE_CODE"], res["RESPONSE_CODE"], "-")
if res and res["ERROR"] then
last_error = res["ERROR"]
elseif res and res["RESPONSE_CODE"] ~= nil then
respcode = ternary(res["RESPONSE_CODE"], res["RESPONSE_CODE"], "-")
if res["IS_PARTIAL"] then
last_error = i18n("category_lists.connection_time_out", {duration=(os.time() - started_at)})
else
last_error = i18n("category_lists.server_returned_error")
end
if res["IS_PARTIAL"] then
last_error = i18n("category_lists.connection_time_out", {duration=(os.time() - started_at)})
else
last_error = i18n("category_lists.server_returned_error")
end
if(respcode > 0) then
last_error = last_error .. i18n("category_lists.http_code", {err_code = respcode})
end
end
if(respcode > 0) then
last_error = last_error .. i18n("category_lists.http_code", {err_code = respcode})
end
end
list.status.last_error = last_error
list.status.num_errors = list.status.num_errors + 1
list.status.last_error = last_error
list.status.num_errors = list.status.num_errors + 1
alerts_api.store(
alerts_api.categoryListsEntity(list_name),
alerts_api.listDownloadFailedType(list_name, last_error)
)
alerts_api.store(
alerts_api.categoryListsEntity(list_name),
alerts_api.listDownloadFailedType(list_name, last_error)
)
end
now = os.time()
-- set last_update even on failure to avoid blocking on the same list again
list.status.last_update = now
ntop.delCache("ntopng.cache.category_lists.update." .. list_name)
if now-begin_time >= timeout then
-- took too long, will resume on next housekeeping execution
break
end
end
end
now = os.time()
-- set last_update even on failure to avoid blocking on the same list again
list.status.last_update = now
ntop.delCache("ntopng.cache.category_lists.update." .. list_name)
-- update lists state
saveListsStatusToRedis(lists)
if now-begin_time >= timeout then
-- took too long, will resume on next housekeeping execution
break
end
end
end
-- update lists state
saveListsStatusToRedis(lists)
if now-begin_time >= timeout then
-- Still in progress, do not mark as finished yet
return false
else
return true
end
if now-begin_time >= timeout then
-- Still in progress, do not mark as finished yet
return false
else
return true
end
end
-- ##############################################
local function loadListItem(host, category, user_custom_categories)
category = tonumber(category)
-- Checking for "whitelisted hosts" (Format: !<host>)
if string.sub(host, 1, 1) == "!" then
return false
end
category = tonumber(category)
if category ~= nil then
--traceError(TRACE_NORMAL, TRACE_CONSOLE, host .. " -> " .. category)
-- Checking for "whitelisted hosts" (Format: !<host>)
if string.sub(host, 1, 1) == "!" then
return false
end
-- Checking for "whitelisted hosts"
if user_custom_categories[category] ~= nil then
local hosts_map = swapKeysValues(user_custom_categories[category])
if hosts_map["!"..host] ~= nil then
return false
if category ~= nil then
--traceError(TRACE_NORMAL, TRACE_CONSOLE, host .. " -> " .. category)
-- Checking for "whitelisted hosts"
if user_custom_categories[category] ~= nil then
local hosts_map = swapKeysValues(user_custom_categories[category])
if hosts_map["!"..host] ~= nil then
return false
end
end
end
if isIPv4(host) or isIPv4Network(host) then
ntop.loadCustomCategoryIp(host, category)
return true
else
ntop.loadCustomCategoryHost(host, category)
return true
end
end
if isIPv4(host) or isIPv4Network(host) then
ntop.loadCustomCategoryIp(host, category)
return true
else
ntop.loadCustomCategoryHost(host, category)
return true
end
end
return false
return false
end
-- ##############################################
local function parse_hosts_line(line)
local words = string.split(line, "%s+")
local host = nil
local words = string.split(line, "%s+")
local host = nil
if(words and (#words == 2)) then
host = words[2]
if(words and (#words == 2)) then
host = words[2]
if((host == "localhost") or (host == "127.0.0.1") or (host == "::1")) then
if((host == "localhost") or (host == "127.0.0.1") or (host == "::1")) then
host = nil
end
else
-- invalid host
host = nil
end
else
-- invalid host
host = nil
end
end
return(host)
return(host)
end
-- ##############################################
local function handle_ja3_suricata_csv_line(line)
local parts = string.split(line, ",")
local parts = string.split(line, ",")
if((parts ~= nil) and (#parts >= 1)) then
local md5_hash = parts[1]
if((parts ~= nil) and (#parts >= 1)) then
local md5_hash = parts[1]
if(string.len(md5_hash) == 32) then
ntop.loadMaliciousJA3Hash(string.lower(md5_hash))
return(true)
end
end
if(string.len(md5_hash) == 32) then
ntop.loadMaliciousJA3Hash(string.lower(md5_hash))
return(true)
end
end
return(false)
return(false)
end
-- ##############################################
-- Loads hosts from a list file on disk
local function loadFromListFile(list_name, list, user_custom_categories)
local list_fname = getListCacheFile(list_name)
local num_lines = 0
local f = io.open(list_fname, "r")
local list_fname = getListCacheFile(list_name)
local num_lines = 0
local f = io.open(list_fname, "r")
if f == nil then
if list.status.num_hosts > 0 then
-- avoid generating warnings during first startup
traceError(TRACE_WARNING, TRACE_CONSOLE, string.format("Could not find '%s'...", list_fname))
end
return 0
end
traceError(TRACE_INFO, TRACE_CONSOLE, string.format("Loading list '%s'...", list_fname))
for line in f:lines() do
local trimmed = line:match("^%s*(.-)%s*$")
if((string.len(trimmed) > 0) and not(string.starts(trimmed, "#"))) then
local host = trimmed
if list.format == "hosts" then
host = parse_hosts_line(trimmed)
elseif list.format == "ja3_suricata_csv" then
-- handled differently
if handle_ja3_suricata_csv_line(trimmed) then
num_lines = num_lines + 1
end
host = nil
if f == nil then
if list.status.num_hosts > 0 then
-- avoid generating warnings during first startup
traceError(TRACE_WARNING, TRACE_CONSOLE, string.format("Could not find '%s'...", list_fname))
end
if host then
if loadListItem(host, list.category, user_custom_categories) then
num_lines = num_lines + 1
end
end
end
end
return 0
end
f:close()
return num_lines
traceError(TRACE_INFO, TRACE_CONSOLE, string.format("Loading list '%s'...", list_fname))
for line in f:lines() do
if ntop.isShutdown() then
break
end
local trimmed = line:match("^%s*(.-)%s*$")
if((string.len(trimmed) > 0) and not(string.starts(trimmed, "#"))) then
local host = trimmed
if list.format == "hosts" then
host = parse_hosts_line(trimmed)
elseif list.format == "ja3_suricata_csv" then
-- handled differently
if handle_ja3_suricata_csv_line(trimmed) then
num_lines = num_lines + 1
end
host = nil
end
if host then
if loadListItem(host, list.category, user_custom_categories) then
num_lines = num_lines + 1
end
end
end
end
f:close()
return num_lines
end
-- ##############################################
-- NOTE: this must be executed in the same thread as checkListsUpdate
local function reloadListsNow()
local user_custom_categories = categories_utils.getAllCustomCategoryHosts()
local lists = lists_utils.getCategoryLists()
local start_t = os.time()
local user_custom_categories = categories_utils.getAllCustomCategoryHosts()
local lists = lists_utils.getCategoryLists()
local start_t = os.time()
-- Load hosts from cached URL lists
for list_name, list in pairsByKeys(lists) do
if list.enabled then
local new_hosts = loadFromListFile(list_name, list, user_custom_categories)
-- Load hosts from cached URL lists
for list_name, list in pairsByKeys(lists) do
if list.enabled then
local new_hosts = loadFromListFile(list_name, list, user_custom_categories)
if new_hosts > 0 then
list.status.num_hosts = new_hosts
if new_hosts > 0 then
list.status.num_hosts = new_hosts
end
end
end
end
end
-- update lists state
saveListsStatusToRedis(lists)
-- update lists state
saveListsStatusToRedis(lists)
-- Load user-customized categories
for category_id, hosts in pairs(user_custom_categories) do
for _, host in ipairs(hosts) do
loadListItem(host, category_id, user_custom_categories)
end
end
-- Load user-customized categories
for category_id, hosts in pairs(user_custom_categories) do
for _, host in ipairs(hosts) do
if ntop.isShutdown() then
break
end
loadListItem(host, category_id, user_custom_categories)
end
end
local end_t = os.time()
traceError(TRACE_INFO, TRACE_CONSOLE, string.format("Lists loaded in %d seconds", (end_t - start_t)))
local end_t = os.time()
traceError(TRACE_INFO, TRACE_CONSOLE, string.format("Lists loaded in %d seconds", (end_t - start_t)))
-- Reload into memory
ntop.reloadCustomCategories()
ntop.reloadJA3Hashes()
-- Reload into memory
ntop.reloadCustomCategories()
ntop.reloadJA3Hashes()
end
-- ##############################################
-- This avoids waiting for lists reload
function lists_utils.reloadLists()
ntop.setCache("ntopng.cache.reload_lists_utils", "1")
ntop.setCache("ntopng.cache.reload_lists_utils", "1")
end
-- This is necessary to avoid concurrency issues
function lists_utils.downloadLists()
ntop.setCache("ntopng.cache.download_lists_utils", "1")
ntop.setCache("ntopng.cache.download_lists_utils", "1")
end
-- ##############################################
-- This is run in housekeeping.lua
function lists_utils.checkReloadLists()
local reload_now = (ntop.getCache("ntopng.cache.reload_lists_utils") == "1")
local reload_now = (ntop.getCache("ntopng.cache.reload_lists_utils") == "1")
if ntop.getCache("ntopng.cache.download_lists_utils") == "1" then
if checkListsUpdate(60 --[[ timeout ]]) then
ntop.delCache("ntopng.cache.download_lists_utils")
-- lists were possibly updated, reload
reload_now = true
end
end
if ntop.getCache("ntopng.cache.download_lists_utils") == "1" then
if checkListsUpdate(60 --[[ timeout ]]) then
ntop.delCache("ntopng.cache.download_lists_utils")
-- lists were possibly updated, reload
reload_now = true
end
end
if reload_now then
reloadListsNow()
ntop.delCache("ntopng.cache.reload_lists_utils")
end
if reload_now then
reloadListsNow()
ntop.delCache("ntopng.cache.reload_lists_utils")
end
-- Possibly reload hosts blacklist status
ntop.checkReloadHostBlacklist()
-- Possibly reload hosts blacklist status
ntop.checkReloadHostBlacklist()
end
-- ##############################################