nDPI/fuzz/fuzz_ds_tree.cpp
Ivan Nardi b51a2ac72a
fuzz: some improvements and add two new fuzzers (#1881)
Remove `FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION` define from
`fuzz/Makefile.am`; it is already included by the main configure script
(when fuzzing).

Add a knob to force disabling of AESNI optimizations: this way we can
fuzz also no-aesni crypto code.

Move CRC32 algorithm into the library.

Add some fake traces to extend fuzzing coverage. Note that these traces
are hand-made (via scapy/curl) and must not be used as "proof" that the
dissectors are really able to identify this kind of traffic.

Some small updates to some dissectors:

CSGO: remove a wrong rule (never triggered, BTW). Any UDP packet starting
with "VS01" will be classified as STEAM (see steam.c around line 111).
Googling it, this behavior appears to be correct.

XBOX: XBOX only analyses UDP flows while HTTP only TCP ones; therefore
that condition is false.

RTP, STUN: removed useless "break"s

Zattoo: `flow->zattoo_stage` is never set to any values greater or equal
to 5, so these checks are never true.

PPStream: `flow->l4.udp.ppstream_stage` is never read. Delete it.

TeamSpeak: we check for `flow->packet_counter == 3` just above, so the
following check `flow->packet_counter >= 3` is always false.
2023-02-09 20:02:12 +01:00

96 lines
2.4 KiB
C++

#include "ndpi_api.h"
#include "fuzz_common_code.h"
#include <stdint.h>
#include <stdio.h>
#include <assert.h>
#include "fuzzer/FuzzedDataProvider.h"
/* Three-way comparison callback for the ndpi_t* tree API.
 * Interprets both opaque pointers as u_int32_t keys.
 * Returns 0 if equal, -1 if *a < *b, +1 if *a > *b.
 * Made static: file-local helper, keep it out of the global namespace. */
static int __compare(const void *a, const void *b)
{
const u_int32_t *entry_a = (const u_int32_t *)a;
const u_int32_t *entry_b = (const u_int32_t *)b;
return *entry_a == *entry_b ? 0 : (*entry_a < *entry_b ? -1 : +1);
}
/* Destructor callback passed to ndpi_tdestroy(): releases the u_int32_t
 * payload stored in one tree node.
 * Made static: file-local helper, keep it out of the global namespace. */
static void __free(void * const node)
{
/* The payload was allocated with ndpi_malloc(), so it must be released
   with the matching ndpi_free(). */
ndpi_free((u_int32_t *)node);
}
/* Visitor callback for ndpi_twalk(): sanity-checks that every visited node
 * is non-NULL and that the user_data passed by the harness (always NULL
 * here) is forwarded unchanged.
 * Made static: file-local helper, keep it out of the global namespace. */
static void __walk(const void *a, ndpi_VISIT which, int depth, void *user_data)
{
/* Unused by design; silence -Wunused-parameter. */
(void)which;
(void)depth;
assert(user_data == NULL && a);
}
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
FuzzedDataProvider fuzzed_data(data, size);
u_int16_t i, num_iteration, is_added = 0;
void *root = NULL;
u_int32_t *entry, value_added, e, *e2;
/* Just to have some data */
if (fuzzed_data.remaining_bytes() < 1024)
return -1;
/* To allow memory allocation failures */
fuzz_set_alloc_callbacks_and_seed(size);
num_iteration = fuzzed_data.ConsumeIntegral<u_int8_t>();
for (i = 0; i < num_iteration; i++) {
entry = (u_int32_t *)ndpi_malloc(sizeof(u_int32_t));
if (!entry)
continue;
*entry = fuzzed_data.ConsumeIntegral<u_int32_t>();
if(ndpi_tfind(entry, &root, __compare) == NULL) {
if(ndpi_tsearch(entry, &root, __compare) == NULL) {
ndpi_free(entry);
} else {
/* Keep one random entry really added */
if (is_added == 0 && fuzzed_data.ConsumeBool()) {
value_added = *entry;
is_added = 1;
}
}
} else {
ndpi_free(entry);
}
}
/* "Random" search */
num_iteration = fuzzed_data.ConsumeIntegral<u_int8_t>();
for (i = 0; i < num_iteration; i++) {
e = fuzzed_data.ConsumeIntegral<u_int32_t>();
ndpi_tfind(&e, &root, __compare);
}
/* Search of an added node */
if (is_added) {
ndpi_tfind(&value_added, &root, __compare);
}
ndpi_twalk(root, __walk, NULL);
/* "Random" delete */
num_iteration = fuzzed_data.ConsumeIntegral<u_int8_t>();
for (i = 0; i < num_iteration; i++) {
e = fuzzed_data.ConsumeIntegral<u_int32_t>();
e2 = (u_int32_t *)ndpi_tdelete(&e, &root, __compare);
ndpi_free(e2);
}
/* Delete of an added node */
if (is_added) {
e2 = (u_int32_t *)ndpi_tdelete(&value_added, &root, __compare);
ndpi_free(e2);
}
ndpi_twalk(root, __walk, NULL);
ndpi_tdestroy(root, __free);
return 0;
}