Mirror of https://github.com/safing/portmaster
Add support for free-text search and minor bug fixes in netquery
This commit is contained in:
parent bef911e925
commit 15f85b5ae9

6 changed files with 109 additions and 15 deletions
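For orientation, the free-text search added here is driven by the new TextSearch payload type (fields + value) that is referenced from QueryRequestPayload and QueryActiveConnectionChartPayload further down in the diff. A minimal sketch of what a request body could look like, assuming the JSON encoding of QueryRequestPayload is what gets posted to the netquery query API; the column names in the example are illustrative, not taken from this commit:

package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the JSON shape of the TextSearch type added in this commit.
type TextSearch struct {
	Fields []string `json:"fields"`
	Value  string   `json:"value"`
}

// Reduced sketch of QueryRequestPayload; only the fields relevant to
// free-text search are shown.
type QueryRequestPayload struct {
	Query      map[string]interface{} `json:"query,omitempty"`
	TextSearch *TextSearch            `json:"textSearch,omitempty"`
}

func main() {
	// Search the "domain" and "path" text columns for "github".
	// The column names are illustrative; any TEXT column of the
	// connections table should work.
	payload := QueryRequestPayload{
		TextSearch: &TextSearch{
			Fields: []string{"domain", "path"},
			Value:  "github",
		},
	}

	body, _ := json.MarshalIndent(payload, "", "  ")
	fmt.Println(string(body))
}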
@@ -99,9 +99,9 @@ WITH RECURSIVE epoch(x) AS (
 	UNION ALL
 	SELECT x+1 FROM epoch WHERE x+1 < strftime('%%s')+0
 )
-SELECT x as timestamp, COUNT(*) AS value FROM epoch
+SELECT x as timestamp, SUM(verdict IN (2, 5, 6)) AS value, SUM(verdict NOT IN (2, 5, 6)) as countBlocked FROM epoch
 JOIN connections
-	ON strftime('%%s', connections.started)+0 <= timestamp+0 AND (connections.ended IS NULL OR strftime('%%s', connections.ended)+0 > timestamp+0)
+	ON strftime('%%s', connections.started)+0 <= timestamp+0 AND (connections.ended IS NULL OR strftime('%%s', connections.ended)+0 >= timestamp+0)
 %s
 GROUP BY round(timestamp/10, 0)*10;`
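The chart query now reports two series per 10-second bucket: value sums connections whose verdict is in (2, 5, 6) and countBlocked sums the rest; the join condition also changes from > to >= so a connection that ends exactly on a bucket boundary is still counted. A small Go sketch of the bucketing and verdict split, assuming the verdict numbers 2, 5 and 6 correspond to the accept/reroute verdicts handled in convertConnection further down:

package main

import (
	"fmt"
	"math"
)

// bucket mirrors GROUP BY round(timestamp/10, 0)*10: timestamps are
// grouped into 10-second buckets centred on multiples of ten.
func bucket(ts int64) int64 {
	return int64(math.Round(float64(ts)/10)) * 10
}

// allowedVerdict mirrors the assumption that verdict IN (2, 5, 6) marks
// accepted or rerouted connections; everything else counts towards
// countBlocked.
func allowedVerdict(verdict int) bool {
	switch verdict {
	case 2, 5, 6:
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(bucket(1699999994)) // 1699999990
	fmt.Println(allowedVerdict(2))  // true
}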
@@ -110,6 +110,28 @@ SELECT x as timestamp, COUNT(*) AS value FROM epoch
 		return "", nil, err
 	}

+	if params == nil {
+		params = make(map[string]interface{})
+	}
+
+	if req.TextSearch != nil {
+		textSearch, textParams, err := req.TextSearch.toSQLConditionClause(ctx, schema, "", orm.DefaultEncodeConfig)
+		if err != nil {
+			return "", nil, err
+		}
+
+		if textSearch != "" {
+			if clause != "" {
+				clause += " AND "
+			}
+			clause += textSearch
+
+			for key, val := range textParams {
+				params[key] = val
+			}
+		}
+	}
+
 	if clause == "" {
 		return fmt.Sprintf(template, ""), map[string]interface{}{}, nil
 	}
@@ -65,7 +65,6 @@ type (
 		// reused afterwards.
 		ID string `sqlite:"id,primary"`
 		ProfileID string `sqlite:"profile"`
-		ProfileSource string `sqlite:"profileSource"`
 		Path string `sqlite:"path"`
 		Type string `sqlite:"type,varchar(8)"`
 		External bool `sqlite:"external"`
@@ -92,6 +91,7 @@ type (
 		ExtraData json.RawMessage `sqlite:"extra_data"`
 		Allowed *bool `sqlite:"allowed"`
 		ProfileRevision int `sqlite:"profile_revision"`
+		ExitNode *string `sqlite:"exit_node"`
 	}
 )
@@ -174,8 +174,7 @@ func convertConnection(conn *network.Connection) (*Conn, error) {
 		Internal: conn.Internal,
 		Direction: direction,
 		Type: ConnectionTypeToString[conn.Type],
-		ProfileID: conn.ProcessContext.Profile,
-		ProfileSource: conn.ProcessContext.Source,
+		ProfileID: conn.ProcessContext.Source + "/" + conn.ProcessContext.Profile,
 		Path: conn.ProcessContext.BinaryPath,
 		ProfileRevision: int(conn.ProfileRevisionCounter),
 	}
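With the profileSource column gone, the profile column now stores source and profile ID joined by a slash. A small sketch of splitting the combined key back apart on the consumer side; the helper and the example values are illustrative:

package main

import (
	"fmt"
	"strings"
)

// splitProfileID undoes the Source + "/" + Profile concatenation used in
// convertConnection. SplitN is used because the profile ID itself may
// contain further slashes.
func splitProfileID(combined string) (source, profile string) {
	parts := strings.SplitN(combined, "/", 2)
	if len(parts) != 2 {
		return "", combined
	}
	return parts[0], parts[1]
}

func main() {
	source, profile := splitProfileID("local/_unidentified")
	fmt.Println(source, profile)
}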
@@ -191,6 +190,11 @@ func convertConnection(conn *network.Connection) (*Conn, error) {
 	case network.VerdictAccept, network.VerdictRerouteToNameserver, network.VerdictRerouteToTunnel:
 		accepted := true
 		c.Allowed = &accepted
+	case network.VerdictUndecided, network.VerdictUndeterminable:
+		c.Allowed = nil
 	default:
 		allowed := false
 		c.Allowed = &allowed
 	}

 	if conn.Ended > 0 {
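The added case makes Allowed a genuine tri-state: true for accepted or rerouted connections, nil while the verdict is still undecided or undeterminable, and false otherwise. A minimal sketch of how a consumer of the allowed column might read the pointer:

package main

import "fmt"

// describeAllowed interprets the tri-state allowed column written by
// convertConnection: nil means no final verdict yet.
func describeAllowed(allowed *bool) string {
	switch {
	case allowed == nil:
		return "undecided"
	case *allowed:
		return "allowed"
	default:
		return "blocked"
	}
}

func main() {
	t := true
	fmt.Println(describeAllowed(nil)) // undecided
	fmt.Println(describeAllowed(&t))  // allowed
}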
@@ -198,12 +202,25 @@ func convertConnection(conn *network.Connection) (*Conn, error) {
 		c.Ended = &ended
 	}

-	extraData := map[string]interface{}{}
+	extraData := map[string]interface{}{
+		"pid": conn.ProcessContext.PID,
+	}

 	if conn.TunnelContext != nil {
 		extraData["tunnel"] = conn.TunnelContext
+
+		exitNode := conn.TunnelContext.GetExitNodeID()
+		c.ExitNode = &exitNode
 	}

 	if conn.DNSContext != nil {
 		extraData["dns"] = conn.DNSContext
 	}

+	// TODO(ppacher): enable when TLS inspection is merged
+	// if conn.TLSContext != nil {
+	// 	extraData["tls"] = conn.TLSContext
+	// }
+
 	if conn.Entity != nil {
 		extraData["cname"] = conn.Entity.CNAME
 		extraData["blockedByLists"] = conn.Entity.BlockedByLists
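extra_data now always records the process PID and, when present, the tunnel and DNS request contexts, while the tunnel exit node additionally gets its own exit_node column. A minimal sketch of decoding the blob on the consumer side; the struct below only covers the keys written above and is an assumption about their JSON shape, not an official schema:

package main

import (
	"encoding/json"
	"fmt"
)

// connExtraData mirrors the keys written into extra_data by
// convertConnection; tunnel and dns are kept as raw JSON because their
// exact shape depends on the tunnel and resolver packages.
type connExtraData struct {
	PID    int             `json:"pid"`
	Tunnel json.RawMessage `json:"tunnel,omitempty"`
	DNS    json.RawMessage `json:"dns,omitempty"`
	CNAME  []string        `json:"cname,omitempty"`
}

func main() {
	raw := []byte(`{"pid": 1234, "cname": ["cdn.example.com"]}`)

	var extra connExtraData
	if err := json.Unmarshal(raw, &extra); err != nil {
		panic(err)
	}
	fmt.Println(extra.PID, extra.CNAME) // 1234 [cdn.example.com]
}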
@@ -11,6 +11,7 @@ import (

 	"github.com/hashicorp/go-multierror"
 	"github.com/safing/portmaster/netquery/orm"
+	"zombiezen.com/go/sqlite"
 )

 type (
@@ -53,11 +54,17 @@ type (
 	Selects []Select

+	TextSearch struct {
+		Fields []string `json:"fields"`
+		Value string `json:"value"`
+	}
+
 	QueryRequestPayload struct {
 		Select Selects `json:"select"`
 		Query Query `json:"query"`
 		OrderBy OrderBys `json:"orderBy"`
 		GroupBy []string `json:"groupBy"`
+		TextSearch *TextSearch `json:"textSearch"`

 		Pagination
@@ -68,6 +75,7 @@ type (

 	QueryActiveConnectionChartPayload struct {
 		Query Query `json:"query"`
+		TextSearch *TextSearch `json:"textSearch"`
 	}

 	OrderBy struct {
@@ -231,6 +239,34 @@ func (match Matcher) Validate() error {
 	return nil
 }

+func (text TextSearch) toSQLConditionClause(ctx context.Context, schema *orm.TableSchema, suffix string, encoderConfig orm.EncodeConfig) (string, map[string]interface{}, error) {
+	var (
+		queryParts []string
+		params = make(map[string]interface{})
+	)
+
+	key := fmt.Sprintf(":t%s", suffix)
+	params[key] = fmt.Sprintf("%%%s%%", text.Value)
+
+	for _, field := range text.Fields {
+		colDef := schema.GetColumnDef(field)
+		if colDef == nil {
+			return "", nil, fmt.Errorf("column %s is not allowed in text-search", field)
+		}
+		if colDef.Type != sqlite.TypeText {
+			return "", nil, fmt.Errorf("type of column %s cannot be used in text-search", colDef.Name)
+		}
+
+		queryParts = append(queryParts, fmt.Sprintf("%s LIKE %s", colDef.Name, key))
+	}
+
+	if len(queryParts) == 0 {
+		return "", nil, nil
+	}
+
+	return "( " + strings.Join(queryParts, " OR ") + " )", params, nil
+}
+
 func (match Matcher) toSQLConditionClause(ctx context.Context, suffix string, conjunction string, colDef orm.ColumnDef, encoderConfig orm.EncodeConfig) (string, map[string]interface{}, error) {
 	var (
 		queryParts []string
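toSQLConditionClause binds the search value once as a named parameter (":t" plus an optional suffix), wraps it in % wildcards, and emits one LIKE condition per requested text column, OR-ed together. A small sketch that reproduces just the string assembly, without the schema and type checks; the column names in the example are real columns from this diff, the search value is illustrative:

package main

import (
	"fmt"
	"strings"
)

// buildTextSearchClause reproduces the string handling of
// TextSearch.toSQLConditionClause: one shared named parameter, one LIKE
// per column, joined with OR and wrapped in parentheses.
func buildTextSearchClause(columns []string, value, suffix string) (string, map[string]interface{}) {
	params := make(map[string]interface{})
	key := fmt.Sprintf(":t%s", suffix)
	params[key] = fmt.Sprintf("%%%s%%", value)

	parts := make([]string, 0, len(columns))
	for _, col := range columns {
		parts = append(parts, fmt.Sprintf("%s LIKE %s", col, key))
	}
	if len(parts) == 0 {
		return "", nil
	}
	return "( " + strings.Join(parts, " OR ") + " )", params
}

func main() {
	clause, params := buildTextSearchClause([]string{"path", "exit_node"}, "ssh", "")
	fmt.Println(clause) // ( path LIKE :t OR exit_node LIKE :t )
	fmt.Println(params) // map[:t:%ssh%]
}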
@@ -152,7 +152,7 @@ func (req *QueryRequestPayload) generateSQL(ctx context.Context, schema *orm.Tab
 		orm.DefaultEncodeConfig,
 	)
 	if err != nil {
-		return "", nil, fmt.Errorf("ganerating where clause: %w", err)
+		return "", nil, fmt.Errorf("generating where clause: %w", err)
 	}

 	if req.paramMap == nil {
@@ -163,8 +163,24 @@ func (req *QueryRequestPayload) generateSQL(ctx context.Context, schema *orm.Tab
 		req.paramMap[key] = val
 	}

 	// build the actual SQL query statement
 	// FIXME(ppacher): add support for group-by and sort-by
+	if req.TextSearch != nil {
+		textClause, textParams, err := req.TextSearch.toSQLConditionClause(ctx, schema, "", orm.DefaultEncodeConfig)
+		if err != nil {
+			return "", nil, fmt.Errorf("generating text-search clause: %w", err)
+		}
+
+		if textClause != "" {
+			if whereClause != "" {
+				whereClause += " AND "
+			}
+
+			whereClause += textClause
+
+			for key, val := range textParams {
+				req.paramMap[key] = val
+			}
+		}
+	}
+
 	groupByClause, err := req.generateGroupByClause(schema)
 	if err != nil {
@@ -149,7 +149,10 @@ type Connection struct { //nolint:maligned // TODO: fix alignment
 	DNSContext *resolver.DNSRequestContext

 	// TunnelContext holds additional information about the tunnel that this
 	// connection is using.
-	TunnelContext interface{}
+	TunnelContext interface {
+		GetExitNodeID() string
+	}

 	// Internal is set to true if the connection is attributed as an
 	// Portmaster internal connection. Internal may be set at different
 	// points and access to it must be guarded by the connection lock.
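Narrowing TunnelContext from interface{} to an interface requiring GetExitNodeID() string is what lets convertConnection populate the new exit_node column without importing the tunnel implementation; any value assigned to the field now has to provide that method. A minimal sketch of a type satisfying the constraint, with illustrative names (the real implementation lives in the SPN/tunnel code):

package main

import "fmt"

// TunnelContext matches the narrowed interface on network.Connection.
type TunnelContext interface {
	GetExitNodeID() string
}

// dummyTunnelContext is an illustrative implementation used only to show
// the contract; it is not an actual Portmaster type.
type dummyTunnelContext struct {
	ExitNodeID string `json:"exitNode"`
}

func (t *dummyTunnelContext) GetExitNodeID() string {
	return t.ExitNodeID
}

func main() {
	var tc TunnelContext = &dummyTunnelContext{ExitNodeID: "node-1234"}
	fmt.Println(tc.GetExitNodeID()) // node-1234
}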