2020-04-22 20:59:40 +00:00
|
|
|
|
package mysql
|
|
|
|
|
|
|
|
|
|
|
|
import (
|
2021-09-14 12:11:07 +00:00
|
|
|
|
"context"
|
2021-09-07 16:48:04 +00:00
|
|
|
|
"crypto/rand"
|
|
|
|
|
|
"crypto/rsa"
|
|
|
|
|
|
"crypto/x509"
|
|
|
|
|
|
"crypto/x509/pkix"
|
2021-09-01 19:50:52 +00:00
|
|
|
|
"database/sql"
|
2021-09-07 16:48:04 +00:00
|
|
|
|
"encoding/pem"
|
2021-11-15 14:11:38 +00:00
|
|
|
|
"errors"
|
2021-09-07 16:48:04 +00:00
|
|
|
|
"fmt"
|
|
|
|
|
|
"math/big"
|
|
|
|
|
|
"net"
|
|
|
|
|
|
"os"
|
2020-04-22 20:59:40 +00:00
|
|
|
|
"testing"
|
2021-09-01 19:50:52 +00:00
|
|
|
|
"time"
|
2020-04-22 20:59:40 +00:00
|
|
|
|
|
2021-03-17 18:23:52 +00:00
|
|
|
|
"github.com/DATA-DOG/go-sqlmock"
|
|
|
|
|
|
"github.com/VividCortex/mysqlerr"
|
2021-09-07 16:48:04 +00:00
|
|
|
|
"github.com/WatchBeam/clock"
|
|
|
|
|
|
"github.com/fleetdm/fleet/v4/server/config"
|
2023-06-19 17:55:15 +00:00
|
|
|
|
"github.com/fleetdm/fleet/v4/server/contexts/ctxdb"
|
2025-02-18 21:28:54 +00:00
|
|
|
|
"github.com/fleetdm/fleet/v4/server/datastore/mysql/common_mysql/testing_utils"
|
2021-06-26 04:46:51 +00:00
|
|
|
|
"github.com/fleetdm/fleet/v4/server/fleet"
|
|
|
|
|
|
"github.com/fleetdm/fleet/v4/server/ptr"
|
2024-06-17 13:27:31 +00:00
|
|
|
|
"github.com/go-kit/log"
|
2021-03-17 18:23:52 +00:00
|
|
|
|
"github.com/go-sql-driver/mysql"
|
|
|
|
|
|
"github.com/jmoiron/sqlx"
|
2020-04-22 20:59:40 +00:00
|
|
|
|
"github.com/stretchr/testify/assert"
|
2021-03-17 18:23:52 +00:00
|
|
|
|
"github.com/stretchr/testify/require"
|
2020-04-22 20:59:40 +00:00
|
|
|
|
)
|
|
|
|
|
|
|
2021-09-01 19:50:52 +00:00
|
|
|
|
// TestDatastoreReplica verifies reader/writer routing when the datastore is
// configured with and without a (dummy) read replica.
func TestDatastoreReplica(t *testing.T) {
	// a bit unfortunate to create temp databases just for this - could be mixed
	// with other tests when/if we move to subtests to minimize the number of
	// databases created for tests (see #1805).

	ctx := context.Background()

	t.Run("noreplica", func(t *testing.T) {
		// Without a replica configured, reader and writer must resolve to the
		// same underlying connection.
		ds := CreateMySQLDSWithOptions(t, nil)
		defer ds.Close()
		require.Equal(t, ds.reader(ctx), ds.writer(ctx))
	})

	t.Run("replica", func(t *testing.T) {
		// With a dummy replica, reader and writer are distinct databases and
		// writes become visible on the replica only after RunReplication.
		opts := &testing_utils.DatastoreTestOptions{DummyReplica: true}
		ds := CreateMySQLDSWithOptions(t, opts)
		defer ds.Close()
		require.NotEqual(t, ds.reader(ctx), ds.writer(ctx))

		// create a new host
		host, err := ds.NewHost(ctx, &fleet.Host{
			DetailUpdatedAt: time.Now(),
			LabelUpdatedAt:  time.Now(),
			PolicyUpdatedAt: time.Now(),
			SeenTime:        time.Now(),
			NodeKey:         ptr.String("1"),
			UUID:            "1",
			Hostname:        "foo.local",
			PrimaryIP:       "192.168.1.1",
			PrimaryMac:      "30-65-EC-6F-C4-58",
		})
		require.NoError(t, err)
		require.NotNil(t, host)

		// trying to read it fails, not replicated yet
		_, err = ds.Host(ctx, host.ID)
		require.Error(t, err)
		require.True(t, errors.Is(err, sql.ErrNoRows), err)

		// force read from primary works
		ctx = ctxdb.RequirePrimary(ctx, true)
		got, err := ds.Host(ctx, host.ID)
		require.NoError(t, err)
		require.Equal(t, host.ID, got.ID)

		// but from replica still fails
		ctx = ctxdb.RequirePrimary(ctx, false)
		_, err = ds.Host(ctx, host.ID)
		require.Error(t, err)
		require.True(t, errors.Is(err, sql.ErrNoRows))

		opts.RunReplication()

		// now it can read it from replica
		got, err = ds.Host(ctx, host.ID)
		require.NoError(t, err)
		require.Equal(t, host.ID, got.ID)
	})
}
|
|
|
|
|
|
|
2020-04-22 20:59:40 +00:00
|
|
|
|
func TestSanitizeColumn(t *testing.T) {
|
2021-02-17 00:53:42 +00:00
|
|
|
|
t.Parallel()
|
|
|
|
|
|
|
2020-04-22 20:59:40 +00:00
|
|
|
|
testCases := []struct {
|
|
|
|
|
|
input string
|
|
|
|
|
|
output string
|
|
|
|
|
|
}{
|
2022-10-08 12:57:46 +00:00
|
|
|
|
{"", ""},
|
|
|
|
|
|
{"foobar-column", "`foobar-column`"},
|
|
|
|
|
|
{"foobar_column", "`foobar_column`"},
|
|
|
|
|
|
{"foobar;column", "`foobarcolumn`"},
|
|
|
|
|
|
{"foobar#", "`foobar`"},
|
|
|
|
|
|
{"foobar*baz", "`foobarbaz`"},
|
|
|
|
|
|
{"....", ""},
|
|
|
|
|
|
{"h.id", "`h`.`id`"},
|
2023-02-20 17:16:56 +00:00
|
|
|
|
{"id;delete from hosts", "`iddeletefromhosts`"},
|
|
|
|
|
|
{"select * from foo", "`selectfromfoo`"},
|
2020-04-22 20:59:40 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
for _, tt := range testCases {
|
|
|
|
|
|
t.Run(tt.input, func(t *testing.T) {
|
2022-10-08 12:57:46 +00:00
|
|
|
|
require.Equal(t, tt.output, sanitizeColumn(tt.input))
|
2020-04-22 20:59:40 +00:00
|
|
|
|
})
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2021-02-17 00:53:42 +00:00
|
|
|
|
|
|
|
|
|
|
func TestSearchLike(t *testing.T) {
|
|
|
|
|
|
t.Parallel()
|
|
|
|
|
|
|
|
|
|
|
|
testCases := []struct {
|
|
|
|
|
|
inSQL string
|
|
|
|
|
|
inParams []interface{}
|
|
|
|
|
|
match string
|
|
|
|
|
|
columns []string
|
|
|
|
|
|
outSQL string
|
|
|
|
|
|
outParams []interface{}
|
|
|
|
|
|
}{
|
|
|
|
|
|
{
|
|
|
|
|
|
inSQL: "SELECT * FROM HOSTS WHERE TRUE",
|
|
|
|
|
|
inParams: []interface{}{},
|
|
|
|
|
|
match: "foobar",
|
|
|
|
|
|
columns: []string{"hostname"},
|
|
|
|
|
|
outSQL: "SELECT * FROM HOSTS WHERE TRUE AND (hostname LIKE ?)",
|
|
|
|
|
|
outParams: []interface{}{"%foobar%"},
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
inSQL: "SELECT * FROM HOSTS WHERE TRUE",
|
|
|
|
|
|
inParams: []interface{}{3},
|
|
|
|
|
|
match: "foobar",
|
|
|
|
|
|
columns: []string{},
|
|
|
|
|
|
outSQL: "SELECT * FROM HOSTS WHERE TRUE",
|
|
|
|
|
|
outParams: []interface{}{3},
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
inSQL: "SELECT * FROM HOSTS WHERE TRUE",
|
|
|
|
|
|
inParams: []interface{}{1},
|
|
|
|
|
|
match: "foobar",
|
|
|
|
|
|
columns: []string{"hostname"},
|
|
|
|
|
|
outSQL: "SELECT * FROM HOSTS WHERE TRUE AND (hostname LIKE ?)",
|
|
|
|
|
|
outParams: []interface{}{1, "%foobar%"},
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
inSQL: "SELECT * FROM HOSTS WHERE TRUE",
|
|
|
|
|
|
inParams: []interface{}{1},
|
|
|
|
|
|
match: "foobar",
|
|
|
|
|
|
columns: []string{"hostname", "uuid"},
|
|
|
|
|
|
outSQL: "SELECT * FROM HOSTS WHERE TRUE AND (hostname LIKE ? OR uuid LIKE ?)",
|
|
|
|
|
|
outParams: []interface{}{1, "%foobar%", "%foobar%"},
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
inSQL: "SELECT * FROM HOSTS WHERE TRUE",
|
|
|
|
|
|
inParams: []interface{}{1},
|
|
|
|
|
|
match: "foobar",
|
|
|
|
|
|
columns: []string{"hostname", "uuid"},
|
|
|
|
|
|
outSQL: "SELECT * FROM HOSTS WHERE TRUE AND (hostname LIKE ? OR uuid LIKE ?)",
|
|
|
|
|
|
outParams: []interface{}{1, "%foobar%", "%foobar%"},
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
inSQL: "SELECT * FROM HOSTS WHERE 1=1",
|
|
|
|
|
|
inParams: []interface{}{1},
|
|
|
|
|
|
match: "forty_%",
|
|
|
|
|
|
columns: []string{"ipv4", "uuid"},
|
|
|
|
|
|
outSQL: "SELECT * FROM HOSTS WHERE 1=1 AND (ipv4 LIKE ? OR uuid LIKE ?)",
|
|
|
|
|
|
outParams: []interface{}{1, "%forty\\_\\%%", "%forty\\_\\%%"},
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
inSQL: "SELECT * FROM HOSTS WHERE 1=1",
|
|
|
|
|
|
inParams: []interface{}{1},
|
|
|
|
|
|
match: "forty_%",
|
|
|
|
|
|
columns: []string{"ipv4", "uuid"},
|
|
|
|
|
|
outSQL: "SELECT * FROM HOSTS WHERE 1=1 AND (ipv4 LIKE ? OR uuid LIKE ?)",
|
|
|
|
|
|
outParams: []interface{}{1, "%forty\\_\\%%", "%forty\\_\\%%"},
|
|
|
|
|
|
},
|
2021-12-21 20:36:19 +00:00
|
|
|
|
{
|
|
|
|
|
|
inSQL: "SELECT * FROM HOSTS WHERE 1=1",
|
|
|
|
|
|
inParams: []interface{}{1},
|
|
|
|
|
|
match: "a@b.c",
|
|
|
|
|
|
columns: []string{"ipv4", "uuid"},
|
|
|
|
|
|
outSQL: "SELECT * FROM HOSTS WHERE 1=1 AND (ipv4 LIKE ? OR uuid LIKE ?)",
|
|
|
|
|
|
outParams: []interface{}{1, "%a@b.c%", "%a@b.c%"},
|
|
|
|
|
|
},
|
2021-02-17 00:53:42 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
for _, tt := range testCases {
|
|
|
|
|
|
t.Run("", func(t *testing.T) {
|
|
|
|
|
|
sql, params := searchLike(tt.inSQL, tt.inParams, tt.match, tt.columns...)
|
2021-12-21 20:36:19 +00:00
|
|
|
|
assert.Equal(t, tt.outSQL, sql)
|
|
|
|
|
|
assert.Equal(t, tt.outParams, params)
|
|
|
|
|
|
})
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func TestHostSearchLike(t *testing.T) {
|
|
|
|
|
|
t.Parallel()
|
|
|
|
|
|
|
|
|
|
|
|
testCases := []struct {
|
|
|
|
|
|
inSQL string
|
|
|
|
|
|
inParams []interface{}
|
|
|
|
|
|
match string
|
|
|
|
|
|
columns []string
|
|
|
|
|
|
outSQL string
|
|
|
|
|
|
outParams []interface{}
|
|
|
|
|
|
}{
|
|
|
|
|
|
{
|
|
|
|
|
|
inSQL: "SELECT * FROM HOSTS h WHERE TRUE",
|
|
|
|
|
|
inParams: []interface{}{},
|
|
|
|
|
|
match: "foobar",
|
|
|
|
|
|
columns: []string{"hostname"},
|
|
|
|
|
|
outSQL: "SELECT * FROM HOSTS h WHERE TRUE AND (hostname LIKE ?)",
|
|
|
|
|
|
outParams: []interface{}{"%foobar%"},
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
inSQL: "SELECT * FROM HOSTS h WHERE 1=1",
|
|
|
|
|
|
inParams: []interface{}{1},
|
|
|
|
|
|
match: "a@b.c",
|
|
|
|
|
|
columns: []string{"ipv4"},
|
|
|
|
|
|
outSQL: "SELECT * FROM HOSTS h WHERE 1=1 AND (ipv4 LIKE ? OR ( EXISTS (SELECT 1 FROM host_emails he WHERE he.host_id = h.id AND he.email LIKE ?)))",
|
|
|
|
|
|
outParams: []interface{}{1, "%a@b.c%", "%a@b.c%"},
|
|
|
|
|
|
},
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
for _, tt := range testCases {
|
|
|
|
|
|
t.Run("", func(t *testing.T) {
|
2023-05-18 14:01:57 +00:00
|
|
|
|
sql, params, _ := hostSearchLike(tt.inSQL, tt.inParams, tt.match, tt.columns...)
|
2021-02-17 00:53:42 +00:00
|
|
|
|
assert.Equal(t, tt.outSQL, sql)
|
|
|
|
|
|
assert.Equal(t, tt.outParams, params)
|
|
|
|
|
|
})
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2021-03-17 18:23:52 +00:00
|
|
|
|
|
|
|
|
|
|
func mockDatastore(t *testing.T) (sqlmock.Sqlmock, *Datastore) {
|
|
|
|
|
|
db, mock, err := sqlmock.New()
|
|
|
|
|
|
require.NoError(t, err)
|
2021-09-01 19:50:52 +00:00
|
|
|
|
dbmock := sqlx.NewDb(db, "sqlmock")
|
2021-03-17 18:23:52 +00:00
|
|
|
|
ds := &Datastore{
|
2023-06-19 17:55:15 +00:00
|
|
|
|
primary: dbmock,
|
|
|
|
|
|
replica: dbmock,
|
|
|
|
|
|
logger: log.NewNopLogger(),
|
2021-03-17 18:23:52 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
return mock, ds
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func TestWithRetryTxxSuccess(t *testing.T) {
|
|
|
|
|
|
mock, ds := mockDatastore(t)
|
|
|
|
|
|
defer ds.Close()
|
|
|
|
|
|
|
|
|
|
|
|
mock.ExpectBegin()
|
|
|
|
|
|
mock.ExpectExec("SELECT 1").WillReturnResult(sqlmock.NewResult(1, 1))
|
|
|
|
|
|
mock.ExpectCommit()
|
|
|
|
|
|
|
2021-09-14 14:44:02 +00:00
|
|
|
|
require.NoError(t, ds.withRetryTxx(context.Background(), func(tx sqlx.ExtContext) error {
|
|
|
|
|
|
_, err := tx.ExecContext(context.Background(), "SELECT 1")
|
2021-03-17 18:23:52 +00:00
|
|
|
|
return err
|
|
|
|
|
|
}))
|
|
|
|
|
|
|
|
|
|
|
|
require.NoError(t, mock.ExpectationsWereMet())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func TestWithRetryTxxRollbackSuccess(t *testing.T) {
|
|
|
|
|
|
mock, ds := mockDatastore(t)
|
|
|
|
|
|
defer ds.Close()
|
|
|
|
|
|
|
|
|
|
|
|
mock.ExpectBegin()
|
|
|
|
|
|
mock.ExpectExec("SELECT 1").WillReturnError(errors.New("fail"))
|
|
|
|
|
|
mock.ExpectRollback()
|
|
|
|
|
|
|
2021-09-14 14:44:02 +00:00
|
|
|
|
require.Error(t, ds.withRetryTxx(context.Background(), func(tx sqlx.ExtContext) error {
|
|
|
|
|
|
_, err := tx.ExecContext(context.Background(), "SELECT 1")
|
2021-03-17 18:23:52 +00:00
|
|
|
|
return err
|
|
|
|
|
|
}))
|
|
|
|
|
|
|
|
|
|
|
|
require.NoError(t, mock.ExpectationsWereMet())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func TestWithRetryTxxRollbackError(t *testing.T) {
|
|
|
|
|
|
mock, ds := mockDatastore(t)
|
|
|
|
|
|
defer ds.Close()
|
|
|
|
|
|
|
|
|
|
|
|
mock.ExpectBegin()
|
|
|
|
|
|
mock.ExpectExec("SELECT 1").WillReturnError(errors.New("fail"))
|
|
|
|
|
|
mock.ExpectRollback().WillReturnError(errors.New("rollback failed"))
|
|
|
|
|
|
|
2021-09-14 14:44:02 +00:00
|
|
|
|
require.Error(t, ds.withRetryTxx(context.Background(), func(tx sqlx.ExtContext) error {
|
|
|
|
|
|
_, err := tx.ExecContext(context.Background(), "SELECT 1")
|
2021-03-17 18:23:52 +00:00
|
|
|
|
return err
|
|
|
|
|
|
}))
|
|
|
|
|
|
|
|
|
|
|
|
require.NoError(t, mock.ExpectationsWereMet())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func TestWithRetryTxxRetrySuccess(t *testing.T) {
|
|
|
|
|
|
mock, ds := mockDatastore(t)
|
|
|
|
|
|
defer ds.Close()
|
|
|
|
|
|
|
|
|
|
|
|
mock.ExpectBegin()
|
|
|
|
|
|
// Return a retryable error
|
|
|
|
|
|
mock.ExpectExec("SELECT 1").WillReturnError(&mysql.MySQLError{Number: mysqlerr.ER_LOCK_DEADLOCK})
|
|
|
|
|
|
mock.ExpectRollback()
|
|
|
|
|
|
mock.ExpectBegin()
|
|
|
|
|
|
mock.ExpectExec("SELECT 1").WillReturnResult(sqlmock.NewResult(1, 1))
|
|
|
|
|
|
mock.ExpectCommit()
|
|
|
|
|
|
|
2021-09-14 14:44:02 +00:00
|
|
|
|
assert.NoError(t, ds.withRetryTxx(context.Background(), func(tx sqlx.ExtContext) error {
|
|
|
|
|
|
_, err := tx.ExecContext(context.Background(), "SELECT 1")
|
2021-03-17 18:23:52 +00:00
|
|
|
|
return err
|
|
|
|
|
|
}))
|
|
|
|
|
|
|
|
|
|
|
|
require.NoError(t, mock.ExpectationsWereMet())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func TestWithRetryTxxCommitRetrySuccess(t *testing.T) {
|
|
|
|
|
|
mock, ds := mockDatastore(t)
|
|
|
|
|
|
defer ds.Close()
|
|
|
|
|
|
|
|
|
|
|
|
mock.ExpectBegin()
|
|
|
|
|
|
mock.ExpectExec("SELECT 1").WillReturnResult(sqlmock.NewResult(1, 1))
|
|
|
|
|
|
// Return a retryable error
|
|
|
|
|
|
mock.ExpectCommit().WillReturnError(&mysql.MySQLError{Number: mysqlerr.ER_LOCK_DEADLOCK})
|
|
|
|
|
|
mock.ExpectBegin()
|
|
|
|
|
|
mock.ExpectExec("SELECT 1").WillReturnResult(sqlmock.NewResult(1, 1))
|
|
|
|
|
|
mock.ExpectCommit()
|
|
|
|
|
|
|
2021-09-14 14:44:02 +00:00
|
|
|
|
assert.NoError(t, ds.withRetryTxx(context.Background(), func(tx sqlx.ExtContext) error {
|
|
|
|
|
|
_, err := tx.ExecContext(context.Background(), "SELECT 1")
|
2021-03-17 18:23:52 +00:00
|
|
|
|
return err
|
|
|
|
|
|
}))
|
|
|
|
|
|
|
|
|
|
|
|
require.NoError(t, mock.ExpectationsWereMet())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// TestWithRetryTxxCommitError ensures a non-retryable commit error is
// propagated to the caller without a second attempt (the mock expects no
// further Begin).
func TestWithRetryTxxCommitError(t *testing.T) {
	mock, ds := mockDatastore(t)
	defer ds.Close()

	mock.ExpectBegin()
	mock.ExpectExec("SELECT 1").WillReturnResult(sqlmock.NewResult(1, 1))
	// Return a non-retryable error from commit; withRetryTxx must fail
	// immediately rather than retrying.
	mock.ExpectCommit().WillReturnError(errors.New("fail"))

	assert.Error(t, ds.withRetryTxx(context.Background(), func(tx sqlx.ExtContext) error {
		_, err := tx.ExecContext(context.Background(), "SELECT 1")
		return err
	}))

	require.NoError(t, mock.ExpectationsWereMet())
}
|
2021-04-05 18:04:53 +00:00
|
|
|
|
|
|
|
|
|
|
func TestAppendListOptionsToSQL(t *testing.T) {
|
2021-11-15 14:34:51 +00:00
|
|
|
|
sql := "SELECT * FROM my_table"
|
2021-06-06 22:07:29 +00:00
|
|
|
|
opts := fleet.ListOptions{
|
2022-10-08 12:57:46 +00:00
|
|
|
|
OrderKey: "***name***",
|
2021-04-05 18:04:53 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
2023-11-09 19:18:29 +00:00
|
|
|
|
actual, _ := appendListOptionsToSQL(sql, &opts)
|
2022-10-08 12:57:46 +00:00
|
|
|
|
expected := "SELECT * FROM my_table ORDER BY `name` ASC LIMIT 1000000"
|
2021-04-05 18:04:53 +00:00
|
|
|
|
if actual != expected {
|
|
|
|
|
|
t.Error("Expected", expected, "Actual", actual)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2021-11-15 14:34:51 +00:00
|
|
|
|
sql = "SELECT * FROM my_table"
|
2021-06-06 22:07:29 +00:00
|
|
|
|
opts.OrderDirection = fleet.OrderDescending
|
2023-11-09 19:18:29 +00:00
|
|
|
|
actual, _ = appendListOptionsToSQL(sql, &opts)
|
2022-10-08 12:57:46 +00:00
|
|
|
|
expected = "SELECT * FROM my_table ORDER BY `name` DESC LIMIT 1000000"
|
2021-04-05 18:04:53 +00:00
|
|
|
|
if actual != expected {
|
|
|
|
|
|
t.Error("Expected", expected, "Actual", actual)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2021-06-06 22:07:29 +00:00
|
|
|
|
opts = fleet.ListOptions{
|
2021-04-05 18:04:53 +00:00
|
|
|
|
PerPage: 10,
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2021-11-15 14:34:51 +00:00
|
|
|
|
sql = "SELECT * FROM my_table"
|
2023-11-09 19:18:29 +00:00
|
|
|
|
actual, _ = appendListOptionsToSQL(sql, &opts)
|
2021-11-15 14:34:51 +00:00
|
|
|
|
expected = "SELECT * FROM my_table LIMIT 10"
|
2021-04-05 18:04:53 +00:00
|
|
|
|
if actual != expected {
|
|
|
|
|
|
t.Error("Expected", expected, "Actual", actual)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2021-11-15 14:34:51 +00:00
|
|
|
|
sql = "SELECT * FROM my_table"
|
2021-04-05 18:04:53 +00:00
|
|
|
|
opts.Page = 2
|
2023-11-09 19:18:29 +00:00
|
|
|
|
actual, _ = appendListOptionsToSQL(sql, &opts)
|
2021-11-15 14:34:51 +00:00
|
|
|
|
expected = "SELECT * FROM my_table LIMIT 10 OFFSET 20"
|
2021-04-05 18:04:53 +00:00
|
|
|
|
if actual != expected {
|
|
|
|
|
|
t.Error("Expected", expected, "Actual", actual)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2021-06-06 22:07:29 +00:00
|
|
|
|
opts = fleet.ListOptions{}
|
2021-11-15 14:34:51 +00:00
|
|
|
|
sql = "SELECT * FROM my_table"
|
2023-11-09 19:18:29 +00:00
|
|
|
|
actual, _ = appendListOptionsToSQL(sql, &opts)
|
2021-11-15 14:34:51 +00:00
|
|
|
|
expected = "SELECT * FROM my_table LIMIT 1000000"
|
2021-04-05 18:04:53 +00:00
|
|
|
|
|
|
|
|
|
|
if actual != expected {
|
|
|
|
|
|
t.Error("Expected", expected, "Actual", actual)
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2021-05-25 04:34:08 +00:00
|
|
|
|
|
|
|
|
|
|
// TestWhereFilterHostsByTeams verifies the SQL WHERE fragment generated for
// a team filter: "TRUE" (all hosts), "FALSE" (no hosts), an IN-list of the
// teams the user may see, or an equality check when a specific TeamID is
// requested.
func TestWhereFilterHostsByTeams(t *testing.T) {
	t.Parallel()

	testCases := []struct {
		filter   fleet.TeamFilter
		expected string
	}{
		// No teams or global role
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{},
			},
			expected: "FALSE",
		},
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{Teams: []fleet.UserTeam{}},
			},
			expected: "FALSE",
		},

		// Global role
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{GlobalRole: ptr.String(fleet.RoleAdmin)},
			},
			expected: "TRUE",
		},
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{GlobalRole: ptr.String(fleet.RoleMaintainer)},
			},
			expected: "TRUE",
		},
		// Global observers are excluded unless IncludeObserver is set.
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{GlobalRole: ptr.String(fleet.RoleObserver)},
			},
			expected: "FALSE",
		},
		{
			filter: fleet.TeamFilter{
				User:            &fleet.User{GlobalRole: ptr.String(fleet.RoleObserver)},
				IncludeObserver: true,
			},
			expected: "TRUE",
		},

		// Team roles
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
					},
				},
			},
			expected: "FALSE",
		},
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
					},
				},
				IncludeObserver: true,
			},
			expected: "hosts.team_id IN (1)",
		},
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 2}},
					},
				},
			},
			expected: "FALSE",
		},
		// Only non-observer team memberships are included by default.
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleMaintainer, Team: fleet.Team{ID: 2}},
					},
				},
			},
			expected: "hosts.team_id IN (2)",
		},
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleMaintainer, Team: fleet.Team{ID: 2}},
					},
				},
				IncludeObserver: true,
			},
			expected: "hosts.team_id IN (1,2)",
		},
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleMaintainer, Team: fleet.Team{ID: 2}},
						// Invalid role should be ignored
						{Role: "bad", Team: fleet.Team{ID: 37}},
					},
				},
			},
			expected: "hosts.team_id IN (2)",
		},
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleMaintainer, Team: fleet.Team{ID: 2}},
						{Role: fleet.RoleAdmin, Team: fleet.Team{ID: 3}},
					},
				},
			},
			expected: "hosts.team_id IN (2,3)",
		},
		// Specific TeamID requested: user must have access to that team.
		{
			filter: fleet.TeamFilter{
				TeamID: ptr.Uint(1),
			},
			expected: "FALSE",
		},
		{
			filter: fleet.TeamFilter{
				User:            &fleet.User{GlobalRole: ptr.String(fleet.RoleObserver)},
				IncludeObserver: true,
				TeamID:          ptr.Uint(1),
			},
			expected: "hosts.team_id = 1",
		},
		{
			filter: fleet.TeamFilter{
				User:            &fleet.User{GlobalRole: ptr.String(fleet.RoleObserver)},
				IncludeObserver: false,
				TeamID:          ptr.Uint(1),
			},
			expected: "FALSE",
		},
		{
			filter: fleet.TeamFilter{
				User:            &fleet.User{GlobalRole: ptr.String(fleet.RoleAdmin)},
				IncludeObserver: false,
				TeamID:          ptr.Uint(1),
			},
			expected: "hosts.team_id = 1",
		},
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleMaintainer, Team: fleet.Team{ID: 2}},
					},
				},
				TeamID: ptr.Uint(3),
			},
			expected: "FALSE",
		},
		{
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleMaintainer, Team: fleet.Team{ID: 2}},
					},
				},
				TeamID: ptr.Uint(2),
			},
			expected: "hosts.team_id = 2",
		},
	}

	for _, tt := range testCases {
		tt := tt
		t.Run("", func(t *testing.T) {
			ds := &Datastore{logger: log.NewNopLogger()}
			sql := ds.whereFilterHostsByTeams(tt.filter, "hosts")
			assert.Equal(t, tt.expected, sql)
		})
	}
}
|
2021-05-27 20:18:00 +00:00
|
|
|
|
|
|
|
|
|
|
func TestWhereOmitIDs(t *testing.T) {
|
|
|
|
|
|
t.Parallel()
|
|
|
|
|
|
|
|
|
|
|
|
testCases := []struct {
|
|
|
|
|
|
omits []uint
|
|
|
|
|
|
expected string
|
|
|
|
|
|
}{
|
|
|
|
|
|
{
|
|
|
|
|
|
omits: nil,
|
|
|
|
|
|
expected: "TRUE",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
omits: []uint{},
|
|
|
|
|
|
expected: "TRUE",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
omits: []uint{1, 3, 4},
|
|
|
|
|
|
expected: "id NOT IN (1,3,4)",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
omits: []uint{42},
|
|
|
|
|
|
expected: "id NOT IN (42)",
|
|
|
|
|
|
},
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
for _, tt := range testCases {
|
|
|
|
|
|
tt := tt
|
|
|
|
|
|
t.Run("", func(t *testing.T) {
|
|
|
|
|
|
ds := &Datastore{logger: log.NewNopLogger()}
|
|
|
|
|
|
sql := ds.whereOmitIDs("id", tt.omits)
|
|
|
|
|
|
assert.Equal(t, tt.expected, sql)
|
|
|
|
|
|
})
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2021-09-07 16:48:04 +00:00
|
|
|
|
|
|
|
|
|
|
func TestWithRetryTxWithRollback(t *testing.T) {
|
|
|
|
|
|
mock, ds := mockDatastore(t)
|
|
|
|
|
|
defer ds.Close()
|
|
|
|
|
|
|
|
|
|
|
|
mock.ExpectBegin()
|
|
|
|
|
|
mock.ExpectExec("SELECT 1").WillReturnError(errors.New("let's rollback!"))
|
|
|
|
|
|
mock.ExpectRollback()
|
|
|
|
|
|
|
2021-09-14 14:44:02 +00:00
|
|
|
|
assert.Error(t, ds.withRetryTxx(context.Background(), func(tx sqlx.ExtContext) error {
|
|
|
|
|
|
_, err := tx.ExecContext(context.Background(), "SELECT 1")
|
2021-09-07 16:48:04 +00:00
|
|
|
|
return err
|
|
|
|
|
|
}))
|
|
|
|
|
|
|
|
|
|
|
|
require.NoError(t, mock.ExpectationsWereMet())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// TestWithRetryTxWillRollbackWhenPanic checks that a panic inside the
// transaction callback still triggers a rollback.
func TestWithRetryTxWillRollbackWhenPanic(t *testing.T) {
	mock, ds := mockDatastore(t)
	defer ds.Close()
	// Swallow the re-panic so the test itself does not fail.
	// NOTE(review): once the callback panics and is re-raised, execution
	// unwinds straight into this deferred recover, so the assertions below
	// never actually run — the test effectively only verifies that the
	// rollback path does not crash. Consider checking ExpectationsWereMet
	// inside a deferred func so it runs after recovery; confirm intent.
	defer func() { recover() }() //nolint:errcheck

	mock.ExpectBegin()
	mock.ExpectExec("SELECT 1").WillReturnError(errors.New("let's rollback!"))
	mock.ExpectRollback()

	assert.Error(t, ds.withRetryTxx(context.Background(), func(tx sqlx.ExtContext) error {
		panic("ROLLBACK")
	}))

	require.NoError(t, mock.ExpectationsWereMet())
}
|
|
|
|
|
|
|
|
|
|
|
|
func TestWithTxWithRollback(t *testing.T) {
|
|
|
|
|
|
mock, ds := mockDatastore(t)
|
|
|
|
|
|
defer ds.Close()
|
|
|
|
|
|
|
|
|
|
|
|
mock.ExpectBegin()
|
|
|
|
|
|
mock.ExpectExec("SELECT 1").WillReturnError(errors.New("let's rollback!"))
|
|
|
|
|
|
mock.ExpectRollback()
|
|
|
|
|
|
|
2021-09-14 14:44:02 +00:00
|
|
|
|
assert.Error(t, ds.withTx(context.Background(), func(tx sqlx.ExtContext) error {
|
|
|
|
|
|
_, err := tx.ExecContext(context.Background(), "SELECT 1")
|
2021-09-07 16:48:04 +00:00
|
|
|
|
return err
|
|
|
|
|
|
}))
|
|
|
|
|
|
|
|
|
|
|
|
require.NoError(t, mock.ExpectationsWereMet())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// TestWithTxWillRollbackWhenPanic checks that a panic inside the withTx
// callback still triggers a rollback.
func TestWithTxWillRollbackWhenPanic(t *testing.T) {
	mock, ds := mockDatastore(t)
	defer ds.Close()
	// Swallow the re-panic so the test itself does not fail.
	// NOTE(review): as in the withRetryTxx variant, the assertions after the
	// panicking call never run because the panic unwinds into this deferred
	// recover — the test only verifies the rollback path does not crash.
	defer func() { recover() }() //nolint:errcheck

	mock.ExpectBegin()
	mock.ExpectExec("SELECT 1").WillReturnError(errors.New("let's rollback!"))
	mock.ExpectRollback()

	assert.Error(t, ds.withTx(context.Background(), func(tx sqlx.ExtContext) error {
		panic("ROLLBACK")
	}))

	require.NoError(t, mock.ExpectationsWereMet())
}
|
|
|
|
|
|
|
|
|
|
|
|
// TestNewReadsPasswordFromDisk verifies that New reads the MySQL password
// from the file referenced by PasswordPath when Password is empty, by
// connecting to the test MySQL instance with only the on-disk password.
func TestNewReadsPasswordFromDisk(t *testing.T) {
	// Write the well-known test password to a temp file; the file is cleaned
	// up automatically via t.TempDir().
	passwordFile, err := os.CreateTemp(t.TempDir(), "*.passwordtest")
	require.NoError(t, err)
	_, err = passwordFile.WriteString(testing_utils.TestPassword)
	require.NoError(t, err)
	passwordPath := passwordFile.Name()
	require.NoError(t, passwordFile.Close())

	dbName := t.Name()

	// Create a datastore client in order to run migrations as usual.
	// Password is intentionally empty so the value must come from
	// PasswordPath.
	mysqlConfig := config.MysqlConfig{
		Username:     testing_utils.TestUsername,
		Password:     "",
		PasswordPath: passwordPath,
		Address:      testing_utils.TestAddress,
		Database:     dbName,
	}
	ds, err := newDSWithConfig(t, dbName, mysqlConfig)
	require.NoError(t, err)
	defer ds.Close()
	// A successful health check proves the connection authenticated with the
	// password read from disk.
	require.NoError(t, ds.HealthCheck())
}
|
|
|
|
|
|
|
|
|
|
|
|
func newDSWithConfig(t *testing.T, dbName string, config config.MysqlConfig) (*Datastore, error) {
|
|
|
|
|
|
db, err := sql.Open(
|
|
|
|
|
|
"mysql",
|
2025-02-18 21:28:54 +00:00
|
|
|
|
fmt.Sprintf("%s:%s@tcp(%s)/?multiStatements=true", testing_utils.TestUsername, testing_utils.TestPassword,
|
|
|
|
|
|
testing_utils.TestAddress),
|
2021-09-07 16:48:04 +00:00
|
|
|
|
)
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
_, err = db.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS %s; CREATE DATABASE %s;", dbName, dbName))
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
|
|
|
|
|
|
ds, err := New(config, clock.NewMockClock(), Logger(log.NewNopLogger()), LimitAttempts(1))
|
|
|
|
|
|
return ds, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func generateTestCert(t *testing.T) (string, string) {
|
|
|
|
|
|
privateKeyCA, err := rsa.GenerateKey(rand.Reader, 1024)
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
|
|
|
|
|
|
serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)
|
|
|
|
|
|
serialNumber, err := rand.Int(rand.Reader, serialNumberLimit)
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
template := x509.Certificate{
|
|
|
|
|
|
SerialNumber: serialNumber,
|
|
|
|
|
|
Subject: pkix.Name{
|
|
|
|
|
|
Organization: []string{"aa"},
|
|
|
|
|
|
},
|
|
|
|
|
|
NotBefore: time.Now().Add(-1 * time.Duration(24) * time.Hour),
|
|
|
|
|
|
NotAfter: time.Now().Add(24 * time.Hour),
|
|
|
|
|
|
IsCA: true,
|
|
|
|
|
|
KeyUsage: x509.KeyUsageDigitalSignature,
|
|
|
|
|
|
ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
|
|
|
|
|
|
BasicConstraintsValid: true,
|
|
|
|
|
|
IPAddresses: []net.IP{net.ParseIP("127.0.0.1")},
|
|
|
|
|
|
}
|
|
|
|
|
|
derBytes, err := x509.CreateCertificate(rand.Reader, &template, &template, &privateKeyCA.PublicKey, privateKeyCA)
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
|
|
|
|
|
|
publicPem, err := os.CreateTemp(t.TempDir(), "*-ca.pem")
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
require.NoError(t, pem.Encode(publicPem, &pem.Block{Type: "CERTIFICATE", Bytes: derBytes}))
|
|
|
|
|
|
require.NoError(t, publicPem.Close())
|
|
|
|
|
|
|
|
|
|
|
|
keyPem, err := os.CreateTemp(t.TempDir(), "*-key.pem")
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
privateKeyBytes := x509.MarshalPKCS1PrivateKey(privateKeyCA)
|
|
|
|
|
|
require.NoError(t, pem.Encode(keyPem, &pem.Block{Type: "RSA PRIVATE KEY", Bytes: privateKeyBytes}))
|
|
|
|
|
|
require.NoError(t, keyPem.Close())
|
|
|
|
|
|
|
|
|
|
|
|
return publicPem.Name(), keyPem.Name()
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func TestNewUsesRegisterTLS(t *testing.T) {
|
|
|
|
|
|
dbName := t.Name()
|
|
|
|
|
|
|
|
|
|
|
|
ca, _ := generateTestCert(t)
|
|
|
|
|
|
cert, key := generateTestCert(t)
|
|
|
|
|
|
|
|
|
|
|
|
mysqlConfig := config.MysqlConfig{
|
2025-02-18 21:28:54 +00:00
|
|
|
|
Username: testing_utils.TestUsername,
|
|
|
|
|
|
Password: testing_utils.TestPassword,
|
|
|
|
|
|
Address: testing_utils.TestAddress,
|
2021-09-07 16:48:04 +00:00
|
|
|
|
Database: dbName,
|
|
|
|
|
|
TLSCA: ca,
|
|
|
|
|
|
TLSCert: cert,
|
|
|
|
|
|
TLSKey: key,
|
|
|
|
|
|
}
|
|
|
|
|
|
// This fails because the certificate mysql is using is different than the one generated here
|
|
|
|
|
|
_, err := newDSWithConfig(t, dbName, mysqlConfig)
|
|
|
|
|
|
require.Error(t, err)
|
2022-04-29 12:31:58 +00:00
|
|
|
|
// TODO: we're using a Regexp because the message is different depending on the version of mysql,
|
|
|
|
|
|
// we should refactor and use different error types instead.
|
2024-05-23 19:23:38 +00:00
|
|
|
|
require.Regexp(t, "(x509|tls|EOF)", err.Error())
|
2021-09-07 16:48:04 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
2021-12-31 17:16:25 +00:00
|
|
|
|
func TestWhereFilterTeams(t *testing.T) {
|
2021-09-07 16:48:04 +00:00
|
|
|
|
t.Parallel()
|
|
|
|
|
|
|
|
|
|
|
|
testCases := []struct {
|
|
|
|
|
|
filter fleet.TeamFilter
|
|
|
|
|
|
expected string
|
|
|
|
|
|
}{
|
|
|
|
|
|
// No teams or global role
|
|
|
|
|
|
{
|
|
|
|
|
|
filter: fleet.TeamFilter{User: nil},
|
|
|
|
|
|
expected: "FALSE",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
filter: fleet.TeamFilter{
|
|
|
|
|
|
User: &fleet.User{GlobalRole: ptr.String(fleet.RoleAdmin)},
|
|
|
|
|
|
},
|
|
|
|
|
|
expected: "TRUE",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
filter: fleet.TeamFilter{
|
|
|
|
|
|
User: &fleet.User{GlobalRole: ptr.String(fleet.RoleObserver)},
|
|
|
|
|
|
IncludeObserver: false,
|
|
|
|
|
|
},
|
|
|
|
|
|
expected: "FALSE",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
filter: fleet.TeamFilter{
|
|
|
|
|
|
User: &fleet.User{GlobalRole: ptr.String(fleet.RoleObserver)},
|
|
|
|
|
|
IncludeObserver: true,
|
|
|
|
|
|
},
|
|
|
|
|
|
expected: "TRUE",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
filter: fleet.TeamFilter{User: &fleet.User{Teams: []fleet.UserTeam{{Team: fleet.Team{ID: 1}, Role: fleet.RoleAdmin}}}},
|
|
|
|
|
|
expected: "t.id IN (1)",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
filter: fleet.TeamFilter{User: &fleet.User{Teams: []fleet.UserTeam{{Team: fleet.Team{ID: 1}, Role: fleet.RoleMaintainer}}}},
|
|
|
|
|
|
expected: "t.id IN (1)",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
filter: fleet.TeamFilter{User: &fleet.User{Teams: []fleet.UserTeam{{Team: fleet.Team{ID: 1}, Role: fleet.RoleObserver}}}},
|
|
|
|
|
|
expected: "FALSE",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
filter: fleet.TeamFilter{
|
|
|
|
|
|
User: &fleet.User{Teams: []fleet.UserTeam{{Team: fleet.Team{ID: 1}, Role: fleet.RoleMaintainer}}},
|
|
|
|
|
|
IncludeObserver: true,
|
|
|
|
|
|
},
|
|
|
|
|
|
expected: "t.id IN (1)",
|
|
|
|
|
|
},
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
for _, tt := range testCases {
|
|
|
|
|
|
tt := tt
|
|
|
|
|
|
t.Run("", func(t *testing.T) {
|
|
|
|
|
|
ds := &Datastore{logger: log.NewNopLogger()}
|
|
|
|
|
|
sql := ds.whereFilterTeams(tt.filter, "t")
|
|
|
|
|
|
assert.Equal(t, tt.expected, sql)
|
|
|
|
|
|
})
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2021-11-22 17:06:12 +00:00
|
|
|
|
|
|
|
|
|
|
func TestCompareVersions(t *testing.T) {
|
|
|
|
|
|
for _, tc := range []struct {
|
|
|
|
|
|
name string
|
|
|
|
|
|
|
2021-12-08 22:50:00 +00:00
|
|
|
|
v1 []int64
|
|
|
|
|
|
v2 []int64
|
|
|
|
|
|
knownUnknowns map[int64]struct{}
|
2021-11-22 17:06:12 +00:00
|
|
|
|
|
|
|
|
|
|
expMissing []int64
|
|
|
|
|
|
expUnknown []int64
|
|
|
|
|
|
expEqual bool
|
|
|
|
|
|
}{
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "both-empty",
|
|
|
|
|
|
v1: nil,
|
|
|
|
|
|
v2: nil,
|
|
|
|
|
|
expEqual: true,
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "equal",
|
|
|
|
|
|
v1: []int64{1, 2, 3},
|
|
|
|
|
|
v2: []int64{1, 2, 3},
|
|
|
|
|
|
expEqual: true,
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "equal-out-of-order",
|
|
|
|
|
|
v1: []int64{1, 2, 3},
|
|
|
|
|
|
v2: []int64{1, 3, 2},
|
|
|
|
|
|
expEqual: true,
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "empty-with-unknown",
|
|
|
|
|
|
v1: nil,
|
|
|
|
|
|
v2: []int64{1},
|
|
|
|
|
|
expEqual: false,
|
|
|
|
|
|
expUnknown: []int64{1},
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "empty-with-missing",
|
|
|
|
|
|
v1: []int64{1},
|
|
|
|
|
|
v2: nil,
|
|
|
|
|
|
expEqual: false,
|
|
|
|
|
|
expMissing: []int64{1},
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "missing",
|
|
|
|
|
|
v1: []int64{1, 2, 3},
|
|
|
|
|
|
v2: []int64{1, 3},
|
|
|
|
|
|
expMissing: []int64{2},
|
|
|
|
|
|
expEqual: false,
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "unknown",
|
|
|
|
|
|
v1: []int64{1, 2, 3},
|
|
|
|
|
|
v2: []int64{1, 2, 3, 4},
|
|
|
|
|
|
expUnknown: []int64{4},
|
|
|
|
|
|
expEqual: false,
|
|
|
|
|
|
},
|
2021-12-08 22:50:00 +00:00
|
|
|
|
{
|
|
|
|
|
|
name: "known-unknown",
|
|
|
|
|
|
v1: []int64{1, 2, 3},
|
|
|
|
|
|
v2: []int64{1, 2, 3, 4},
|
|
|
|
|
|
knownUnknowns: map[int64]struct{}{
|
|
|
|
|
|
4: {},
|
|
|
|
|
|
},
|
|
|
|
|
|
expEqual: true,
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "unknowns",
|
|
|
|
|
|
v1: []int64{1, 2, 3},
|
|
|
|
|
|
v2: []int64{1, 2, 3, 4, 5},
|
|
|
|
|
|
expUnknown: []int64{5},
|
|
|
|
|
|
knownUnknowns: map[int64]struct{}{
|
|
|
|
|
|
4: {},
|
|
|
|
|
|
},
|
|
|
|
|
|
expEqual: false,
|
|
|
|
|
|
},
|
2021-11-22 17:06:12 +00:00
|
|
|
|
{
|
|
|
|
|
|
name: "missing-and-unknown",
|
|
|
|
|
|
v1: []int64{1, 2, 3},
|
|
|
|
|
|
v2: []int64{1, 2, 4},
|
|
|
|
|
|
expMissing: []int64{3},
|
|
|
|
|
|
expUnknown: []int64{4},
|
|
|
|
|
|
expEqual: false,
|
|
|
|
|
|
},
|
|
|
|
|
|
} {
|
|
|
|
|
|
t.Run(tc.name, func(t *testing.T) {
|
2021-12-08 22:50:00 +00:00
|
|
|
|
missing, unknown, equal := compareVersions(tc.v1, tc.v2, tc.knownUnknowns)
|
2021-11-22 17:06:12 +00:00
|
|
|
|
require.Equal(t, tc.expMissing, missing)
|
|
|
|
|
|
require.Equal(t, tc.expUnknown, unknown)
|
|
|
|
|
|
require.Equal(t, tc.expEqual, equal)
|
|
|
|
|
|
})
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2021-12-31 17:16:25 +00:00
|
|
|
|
|
2022-02-03 17:56:22 +00:00
|
|
|
|
func TestDebugs(t *testing.T) {
|
|
|
|
|
|
ds := CreateMySQLDS(t)
|
|
|
|
|
|
|
|
|
|
|
|
status, err := ds.InnoDBStatus(context.Background())
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
assert.NotEmpty(t, status)
|
|
|
|
|
|
|
|
|
|
|
|
processList, err := ds.ProcessList(context.Background())
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
require.Greater(t, len(processList), 0)
|
|
|
|
|
|
}
|
2022-07-20 16:10:03 +00:00
|
|
|
|
|
2024-02-09 18:06:36 +00:00
|
|
|
|
func TestWantedModesEnabled(t *testing.T) {
|
2022-07-20 16:10:03 +00:00
|
|
|
|
ds := CreateMySQLDS(t)
|
|
|
|
|
|
|
|
|
|
|
|
var sqlMode string
|
2023-06-19 17:55:15 +00:00
|
|
|
|
err := ds.writer(context.Background()).GetContext(context.Background(), &sqlMode, `SELECT @@SQL_MODE`)
|
2022-07-20 16:10:03 +00:00
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
require.Contains(t, sqlMode, "ANSI_QUOTES")
|
2024-02-09 18:06:36 +00:00
|
|
|
|
require.Contains(t, sqlMode, "ONLY_FULL_GROUP_BY")
|
2022-07-20 16:10:03 +00:00
|
|
|
|
}
|
2023-05-18 14:01:57 +00:00
|
|
|
|
|
|
|
|
|
|
func Test_buildWildcardMatchPhrase(t *testing.T) {
|
|
|
|
|
|
type args struct {
|
|
|
|
|
|
matchQuery string
|
|
|
|
|
|
}
|
|
|
|
|
|
tests := []struct {
|
|
|
|
|
|
name string
|
|
|
|
|
|
args args
|
|
|
|
|
|
want string
|
|
|
|
|
|
}{
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "",
|
|
|
|
|
|
args: args{matchQuery: "test"},
|
|
|
|
|
|
want: "%test%",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "underscores are escaped",
|
|
|
|
|
|
args: args{matchQuery: "Host_1"},
|
|
|
|
|
|
want: "%Host\\_1%",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "percent are escaped",
|
|
|
|
|
|
args: args{matchQuery: "Host%1"},
|
|
|
|
|
|
want: "%Host\\%1%",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "percent & underscore are escaped",
|
|
|
|
|
|
args: args{matchQuery: "Host_%1"},
|
|
|
|
|
|
want: "%Host\\_\\%1%",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "underscores added for wildcard search are not escaped",
|
|
|
|
|
|
args: args{matchQuery: "Alice‘s MacbookPro"},
|
|
|
|
|
|
want: "%Alice_s MacbookPro%",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "underscores added for wildcard search are not escaped, but underscores in matchQuery are",
|
|
|
|
|
|
args: args{matchQuery: "Alice‘s Macbook_Pro"},
|
|
|
|
|
|
want: "%Alice_s Macbook\\_Pro%",
|
|
|
|
|
|
},
|
|
|
|
|
|
{
|
|
|
|
|
|
name: "multiple occurances of wildcard are not escaped",
|
|
|
|
|
|
args: args{matchQuery: "Alice‘‘s Macbook_Pro"},
|
|
|
|
|
|
want: "%Alice__s Macbook\\_Pro%",
|
|
|
|
|
|
},
|
|
|
|
|
|
}
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
|
|
t.Run(tt.name, func(t *testing.T) {
|
|
|
|
|
|
assert.Equalf(t, tt.want, buildWildcardMatchPhrase(tt.args.matchQuery), "buildWildcardMatchPhrase(%v)", tt.args.matchQuery)
|
|
|
|
|
|
})
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2024-02-15 20:22:27 +00:00
|
|
|
|
|
2025-12-30 03:28:45 +00:00
|
|
|
|
// TestWhereFilterTeamWithGlobalStats verifies the SQL fragment generated by
// Datastore.whereFilterTeamWithGlobalStats against the "hosts" table: global
// admins/maintainers (and global observers only when IncludeObserver is set)
// get the global-stats predicate ("hosts.team_id = 0 AND hosts.global_stats
// = 1"), team roles are restricted to their team ids, an explicit TeamID
// narrows the filter to that single team, and filters with no effective
// access produce "FALSE".
func TestWhereFilterTeamWithGlobalStats(t *testing.T) {
	t.Parallel()

	testCases := []struct {
		name     string
		filter   fleet.TeamFilter
		expected string
	}{
		// No teams or global role
		{
			name: "empty user",
			filter: fleet.TeamFilter{
				User: &fleet.User{},
			},
			expected: "FALSE",
		},
		{
			name: "empty user teams",
			filter: fleet.TeamFilter{
				User: &fleet.User{Teams: []fleet.UserTeam{}},
			},
			expected: "FALSE",
		},

		// Global role
		{
			name: "global admin",
			filter: fleet.TeamFilter{
				User: &fleet.User{GlobalRole: ptr.String(fleet.RoleAdmin)},
			},
			expected: "hosts.team_id = 0 AND hosts.global_stats = 1",
		},
		{
			name: "global maintainer",
			filter: fleet.TeamFilter{
				User: &fleet.User{GlobalRole: ptr.String(fleet.RoleMaintainer)},
			},
			expected: "hosts.team_id = 0 AND hosts.global_stats = 1",
		},
		{
			name: "global observer",
			filter: fleet.TeamFilter{
				User: &fleet.User{GlobalRole: ptr.String(fleet.RoleObserver)},
			},
			expected: "FALSE",
		},
		{
			name: "global observer include",
			filter: fleet.TeamFilter{
				User:            &fleet.User{GlobalRole: ptr.String(fleet.RoleObserver)},
				IncludeObserver: true,
			},
			expected: "hosts.team_id = 0 AND hosts.global_stats = 1",
		},

		// Team roles
		{
			name: "team observer",
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
					},
				},
			},
			expected: "FALSE",
		},
		{
			name: "team observer include",
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
					},
				},
				IncludeObserver: true,
			},
			expected: "hosts.team_id IN (1)",
		},
		{
			name: "multi team observer",
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 2}},
					},
				},
			},
			expected: "FALSE",
		},
		{
			name: "multi team maintainer and observer",
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleMaintainer, Team: fleet.Team{ID: 2}},
					},
				},
			},
			expected: "hosts.team_id IN (2)",
		},
		{
			name: "multi team maintainer and observer include",
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleMaintainer, Team: fleet.Team{ID: 2}},
					},
				},
				IncludeObserver: true,
			},
			expected: "hosts.team_id IN (1,2)",
		},
		{
			name: "multi team maintainer and observer with invalid role",
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleMaintainer, Team: fleet.Team{ID: 2}},
						// Invalid role should be ignored
						{Role: "bad", Team: fleet.Team{ID: 37}},
					},
				},
			},
			expected: "hosts.team_id IN (2)",
		},
		{
			name: "multi team maintainer and observer and admin",
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleMaintainer, Team: fleet.Team{ID: 2}},
						{Role: fleet.RoleAdmin, Team: fleet.Team{ID: 3}},
					},
				},
			},
			expected: "hosts.team_id IN (2,3)",
		},
		{
			name: "team id only",
			filter: fleet.TeamFilter{
				TeamID: ptr.Uint(1),
			},
			expected: "FALSE",
		},
		{
			name: "team id with observer include",
			filter: fleet.TeamFilter{
				User:            &fleet.User{GlobalRole: ptr.String(fleet.RoleObserver)},
				IncludeObserver: true,
				TeamID:          ptr.Uint(1),
			},
			expected: "hosts.team_id = 1",
		},
		{
			name: "team id with observer exclude",
			filter: fleet.TeamFilter{
				User:            &fleet.User{GlobalRole: ptr.String(fleet.RoleObserver)},
				IncludeObserver: false,
				TeamID:          ptr.Uint(1),
			},
			expected: "FALSE",
		},
		{
			name: "team id with admin exclude observer",
			filter: fleet.TeamFilter{
				User:            &fleet.User{GlobalRole: ptr.String(fleet.RoleAdmin)},
				IncludeObserver: false,
				TeamID:          ptr.Uint(1),
			},
			expected: "hosts.team_id = 1",
		},
		{
			name: "team id not in multiple team roles",
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleMaintainer, Team: fleet.Team{ID: 2}},
					},
				},
				TeamID: ptr.Uint(3),
			},
			expected: "FALSE",
		},
		{
			name: "team id in multiple team roles",
			filter: fleet.TeamFilter{
				User: &fleet.User{
					Teams: []fleet.UserTeam{
						{Role: fleet.RoleObserver, Team: fleet.Team{ID: 1}},
						{Role: fleet.RoleMaintainer, Team: fleet.Team{ID: 2}},
					},
				},
				TeamID: ptr.Uint(2),
			},
			expected: "hosts.team_id = 2",
		},
	}

	// Each case only builds a SQL string — no DB access — so the subtests
	// can all run in parallel.
	for _, tt := range testCases {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			ds := &Datastore{logger: log.NewNopLogger()}
			sql := ds.whereFilterTeamWithGlobalStats(tt.filter, "hosts")
			assert.Equal(t, tt.expected, sql)
		})
	}
}
|
2024-03-27 13:44:22 +00:00
|
|
|
|
|
|
|
|
|
|
func TestBatchProcessDB(t *testing.T) {
|
|
|
|
|
|
type testData struct {
|
|
|
|
|
|
id int
|
|
|
|
|
|
value string
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
payload := []interface{}{
|
|
|
|
|
|
&testData{id: 1, value: "a"},
|
|
|
|
|
|
&testData{id: 2, value: "b"},
|
|
|
|
|
|
&testData{id: 3, value: "c"},
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
generateValueArgs := func(item interface{}) (string, []any) {
|
|
|
|
|
|
p := item.(*testData)
|
|
|
|
|
|
valuePart := "(?, ?),"
|
|
|
|
|
|
args := []any{p.id, p.value}
|
|
|
|
|
|
return valuePart, args
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
t.Run("TestEmptyPayload", func(t *testing.T) {
|
|
|
|
|
|
executeBatch := func(valuePart string, args []any) error {
|
|
|
|
|
|
return errors.New("execute shouldn't be called for an empty payload")
|
|
|
|
|
|
}
|
|
|
|
|
|
err := batchProcessDB([]interface{}{}, 1000, generateValueArgs, executeBatch)
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
})
|
|
|
|
|
|
|
|
|
|
|
|
t.Run("TestSingleBatch", func(t *testing.T) {
|
|
|
|
|
|
callCount := 0
|
|
|
|
|
|
executeBatch := func(valuePart string, args []any) error {
|
|
|
|
|
|
callCount++
|
|
|
|
|
|
require.Equal(t, 2, len(args)/2) // each item adds 2 args
|
|
|
|
|
|
return nil
|
|
|
|
|
|
}
|
|
|
|
|
|
err := batchProcessDB(payload[:2], 2, generateValueArgs, executeBatch)
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
require.Equal(t, 1, callCount)
|
|
|
|
|
|
})
|
|
|
|
|
|
|
|
|
|
|
|
t.Run("TestMultipleBatches", func(t *testing.T) {
|
|
|
|
|
|
callCount := 0
|
|
|
|
|
|
executeBatch := func(valuePart string, args []any) error {
|
|
|
|
|
|
callCount++
|
|
|
|
|
|
require.Equal(t, 2/callCount, len(args)/2) // each item adds 2 args
|
|
|
|
|
|
return nil
|
|
|
|
|
|
}
|
|
|
|
|
|
err := batchProcessDB(payload, 2, generateValueArgs, executeBatch)
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
require.Equal(t, 2, callCount)
|
|
|
|
|
|
})
|
|
|
|
|
|
}
|
2024-08-09 14:59:24 +00:00
|
|
|
|
|
|
|
|
|
|
func TestGetContextTryStmt(t *testing.T) {
|
|
|
|
|
|
ctx := context.Background()
|
|
|
|
|
|
|
|
|
|
|
|
dbMock, ds := mockDatastore(t)
|
|
|
|
|
|
ds.stmtCache = map[string]*sqlx.Stmt{}
|
|
|
|
|
|
|
|
|
|
|
|
t.Run("get with unknown statement error", func(t *testing.T) {
|
|
|
|
|
|
count := 0
|
|
|
|
|
|
query := "SELECT 1"
|
|
|
|
|
|
|
|
|
|
|
|
// first call to cache the statement
|
|
|
|
|
|
dbMock.ExpectPrepare(query)
|
|
|
|
|
|
mockResult := sqlmock.NewRows([]string{query})
|
|
|
|
|
|
mockResult.AddRow("1")
|
|
|
|
|
|
dbMock.ExpectQuery(query).WillReturnRows(mockResult)
|
|
|
|
|
|
err := ds.getContextTryStmt(ctx, &count, query)
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
require.NoError(t, dbMock.ExpectationsWereMet())
|
|
|
|
|
|
|
|
|
|
|
|
// verify that the statement was cached
|
|
|
|
|
|
stmt := ds.loadOrPrepareStmt(ctx, query)
|
|
|
|
|
|
require.NotNil(t, stmt)
|
|
|
|
|
|
|
|
|
|
|
|
// call again to trigger the unknown statement error and ensure it retries
|
|
|
|
|
|
// first query, make it fail
|
|
|
|
|
|
queryMock := dbMock.ExpectQuery(query)
|
|
|
|
|
|
mySQLErr := &mysql.MySQLError{
|
|
|
|
|
|
Number: mysqlerr.ER_UNKNOWN_STMT_HANDLER,
|
|
|
|
|
|
}
|
|
|
|
|
|
queryMock.WillReturnError(mySQLErr)
|
|
|
|
|
|
|
|
|
|
|
|
// after the failure, a second call is made, this time without
|
|
|
|
|
|
// the prepared statement
|
|
|
|
|
|
mockResult = sqlmock.NewRows([]string{query})
|
|
|
|
|
|
mockResult.AddRow("1")
|
|
|
|
|
|
dbMock.ExpectQuery(query).WillReturnRows(mockResult)
|
|
|
|
|
|
|
|
|
|
|
|
// make the call and verify we removed the prepared statement
|
|
|
|
|
|
err = ds.getContextTryStmt(ctx, &count, query)
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
require.NoError(t, dbMock.ExpectationsWereMet())
|
|
|
|
|
|
stmt = ds.loadOrPrepareStmt(ctx, query)
|
|
|
|
|
|
require.Nil(t, stmt)
|
|
|
|
|
|
})
|
|
|
|
|
|
|
|
|
|
|
|
t.Run("get with other error", func(t *testing.T) {
|
|
|
|
|
|
dbMock, ds := mockDatastore(t)
|
|
|
|
|
|
ds.stmtCache = map[string]*sqlx.Stmt{}
|
|
|
|
|
|
count := 0
|
|
|
|
|
|
query := "SELECT 1"
|
|
|
|
|
|
|
|
|
|
|
|
// first call to cache the statement
|
|
|
|
|
|
dbMock.ExpectPrepare(query)
|
|
|
|
|
|
mockResult := sqlmock.NewRows([]string{query})
|
|
|
|
|
|
mockResult.AddRow("1")
|
|
|
|
|
|
dbMock.ExpectQuery(query).WillReturnRows(mockResult)
|
|
|
|
|
|
err := ds.getContextTryStmt(ctx, &count, query)
|
|
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
require.Equal(t, 1, count)
|
|
|
|
|
|
require.NoError(t, dbMock.ExpectationsWereMet())
|
|
|
|
|
|
|
|
|
|
|
|
// verify that the statement was cached
|
|
|
|
|
|
stmt := ds.loadOrPrepareStmt(ctx, query)
|
|
|
|
|
|
require.NotNil(t, stmt)
|
|
|
|
|
|
|
|
|
|
|
|
// return a duplicate error
|
|
|
|
|
|
queryMock := dbMock.ExpectQuery(query)
|
|
|
|
|
|
mySQLErr := &mysql.MySQLError{
|
|
|
|
|
|
Number: mysqlerr.ER_DUP_ENTRY,
|
|
|
|
|
|
}
|
|
|
|
|
|
queryMock.WillReturnError(mySQLErr)
|
|
|
|
|
|
|
|
|
|
|
|
count = 0
|
|
|
|
|
|
err = ds.getContextTryStmt(ctx, &count, query)
|
|
|
|
|
|
require.ErrorIs(t, mySQLErr, err)
|
|
|
|
|
|
require.NoError(t, dbMock.ExpectationsWereMet())
|
|
|
|
|
|
stmt = ds.loadOrPrepareStmt(ctx, query)
|
|
|
|
|
|
require.NotNil(t, stmt)
|
|
|
|
|
|
})
|
|
|
|
|
|
|
|
|
|
|
|
}
|