Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Kind can be one of:
# - breaking-change: a change to previously-documented behavior
# - deprecation: functionality that is being removed in a later release
# - bug-fix: fixes a problem in a previous version
# - enhancement: extends functionality but does not break or fix existing behavior
# - feature: new functionality
# - known-issue: problems that we are aware of in a given version
# - security: impacts on the security of a product or a user’s deployment.
# - upgrade: important information for someone upgrading from a prior version
# - other: does not fit into any of the other categories
kind: enhancement

# Change summary; an 80ish-character-long description of the change.
summary: Makes file storage size configurable

# Long description; in case the summary is not enough to describe the change
# this field accommodates a description without length limits.
# NOTE: This field will be rendered only for breaking-change and known-issue kinds at the moment.
#description:

# Affected component; a word indicating the component this changeset affects.
component: fleet-server

# PR URL; optional; the PR number that added the changeset.
# If not present, it is automatically filled by the tooling by finding the PR where this changelog fragment was added.
# NOTE: the tooling supports backports, so it's able to fill the original PR number instead of the backport PR number.
# Please provide it if you are adding a fragment for a different PR.
#pr: https://github.com/owner/repo/1234

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
#pr: https://github.com/owner/repo/1234
pr: https://github.com/elastic/fleet-server/pull/5478


# Issue URL; optional; the GitHub issue related to this changeset (either closes or is part of).
# If not present, it is automatically filled by the tooling with the issue linked to the PR number.
#issue: https://github.com/owner/repo/1234
9 changes: 9 additions & 0 deletions internal/pkg/api/error.go
Original file line number Diff line number Diff line change
Expand Up @@ -340,6 +340,15 @@ func NewHTTPErrResp(err error) HTTPErrResp {
zerolog.InfoLevel,
},
},
{
uploader.ErrFeatureDisabled,
HTTPErrResp{
http.StatusForbidden,
"ErrFileFeatureDisable",
"File Transfer is disabled in Fleet Server configuration",
zerolog.WarnLevel,
},
},
{
uploader.ErrMissingChunks,
HTTPErrResp{
Expand Down
2 changes: 1 addition & 1 deletion internal/pkg/api/handleFileDelivery.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ func NewFileDeliveryT(cfg *config.Server, bulker bulk.Bulk, chunkClient *elastic
return &FileDeliveryT{
bulker: bulker,
cache: cache,
deliverer: delivery.New(chunkClient, bulker, maxFileSize),
deliverer: delivery.New(chunkClient, bulker, cfg.Limits.MaxFileStorageByteSize),
authAgent: authAgent,
}
}
Expand Down
2 changes: 1 addition & 1 deletion internal/pkg/api/handleFileDelivery_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -405,7 +405,7 @@ func prepareFileDeliveryMock(t *testing.T) (http.Handler, apiServer, *MockTransp
ft: &FileDeliveryT{
bulker: fakebulk,
cache: c,
deliverer: delivery.New(mockES, fakebulk, maxFileSize),
deliverer: delivery.New(mockES, fakebulk, nil),
authAgent: func(r *http.Request, id *string, bulker bulk.Bulk, c cache.Cache) (*model.Agent, error) {
return &model.Agent{
ESDocument: model.ESDocument{
Expand Down
4 changes: 1 addition & 3 deletions internal/pkg/api/handleUpload.go
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,6 @@ import (
)

const (
// TODO: move to a config
maxFileSize = 104857600 // 100 MiB
maxUploadTimer = 24 * time.Hour
)

Expand All @@ -58,7 +56,7 @@ func NewUploadT(cfg *config.Server, bulker bulk.Bulk, chunkClient *elasticsearch
chunkClient: chunkClient,
bulker: bulker,
cache: cache,
uploader: uploader.New(chunkClient, bulker, cache, maxFileSize, maxUploadTimer),
uploader: uploader.New(chunkClient, bulker, cache, cfg.Limits.MaxFileStorageByteSize, maxUploadTimer),
authAgent: authAgent,
authAPIKey: authAPIKey,
}
Expand Down
80 changes: 61 additions & 19 deletions internal/pkg/api/handleUpload_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import (
"crypto/sha256"
"encoding/hex"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
Expand Down Expand Up @@ -71,18 +72,6 @@ func TestUploadBeginValidation(t *testing.T) {
"src": "agent"
}`,
},
{"Oversized file should be rejected", http.StatusBadRequest, "size",
`{
"file": {
"size": ` + strconv.Itoa(maxFileSize+1024) + `,
"name": "foo.png",
"mime_type": "image/png"
},
"agent_id": "foo",
"action_id": "123",
"src": "agent"
}`,
},
{"zero size file should be rejected", http.StatusBadRequest, "size",
`{
"file": {
Expand Down Expand Up @@ -346,6 +335,50 @@ func TestUploadBeginBadRequest(t *testing.T) {
assert.Equal(t, http.StatusBadRequest, rec.Code)
}

// TestUploadBeginFileSize verifies that the upload-begin endpoint enforces
// the configured maximum file storage size: a nil limit leaves upload sizes
// unrestricted (zero-length files are still rejected), a limit of 0 disables
// the feature entirely, and otherwise declared sizes above the limit are
// rejected while smaller ones are accepted.
func TestUploadBeginFileSize(t *testing.T) {
	// body renders an upload-begin request payload declaring a file of the given size.
	body := func(size int64) string {
		return fmt.Sprintf(`{
			"file": {
				"size": %d,
				"name": "foo.png",
				"mime_type": "image/png"
			},
			"agent_id": "foo",
			"action_id": "123",
			"src": "agent"
		}`, size)
	}

	// Table of (configured limit, declared file size) -> expected HTTP status.
	cases := []struct {
		name       string
		maxSize    *uint64
		wantStatus int
		fileSize   int64
	}{
		{name: "MaxSize nil allows uploads", maxSize: nil, wantStatus: http.StatusOK, fileSize: 1000},
		{name: "MaxSize nil allows large uploads", maxSize: nil, wantStatus: http.StatusOK, fileSize: 1024 * 1024 * 1024 * 2},
		{name: "MaxSize nil does not allow 0-length files", maxSize: nil, wantStatus: http.StatusBadRequest, fileSize: 0},
		{name: "MaxSize 0 does not allow uploads", maxSize: size_ptr(0), wantStatus: http.StatusForbidden, fileSize: 1000},
		{name: "MaxSize 0 does not allow 0-sized uploads", maxSize: size_ptr(0), wantStatus: http.StatusForbidden, fileSize: 0},
		{name: "Sizes larger than MaxSize are denied", maxSize: size_ptr(1024), wantStatus: http.StatusBadRequest, fileSize: 2048},
		{name: "Sizes smaller than MaxSize are allowed", maxSize: size_ptr(1024), wantStatus: http.StatusOK, fileSize: 900},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			handler, _, _, _ := configureUploaderMock(t, tc.maxSize)
			w := httptest.NewRecorder()
			r := httptest.NewRequest(http.MethodPost, RouteUploadBegin, strings.NewReader(body(tc.fileSize)))
			handler.ServeHTTP(w, r)
			assert.Equal(t, tc.wantStatus, w.Code)
		})
	}
}

/*
Chunk data upload route
*/
Expand Down Expand Up @@ -377,7 +410,7 @@ func TestChunkUploadRouteParams(t *testing.T) {
mockUploadInfoResult(fakebulk, file.Info{
DocID: "bar.foo",
ID: mockUploadID,
ChunkSize: maxFileSize,
ChunkSize: file.MaxChunkSize,
Total: file.MaxChunkSize + 1,
Count: 2, // this is a 2-chunk "file" based on size above
Start: time.Now(),
Expand Down Expand Up @@ -410,7 +443,7 @@ func TestChunkUploadRequiresChunkHashHeader(t *testing.T) {
mockUploadInfoResult(fakebulk, file.Info{
DocID: "bar.foo",
ID: mockUploadID,
ChunkSize: maxFileSize,
ChunkSize: file.MaxChunkSize,
Total: 10,
Count: 1,
Start: time.Now(),
Expand Down Expand Up @@ -458,7 +491,7 @@ func TestChunkUploadStatus(t *testing.T) {
mockUploadInfoResult(fakebulk, file.Info{
DocID: "bar.foo",
ID: mockUploadID,
ChunkSize: maxFileSize,
ChunkSize: file.MaxChunkSize,
Total: 10,
Count: 1,
Start: time.Now(),
Expand Down Expand Up @@ -509,7 +542,7 @@ func TestChunkUploadExpiry(t *testing.T) {
mockUploadInfoResult(fakebulk, file.Info{
DocID: "bar.foo",
ID: mockUploadID,
ChunkSize: maxFileSize,
ChunkSize: file.MaxChunkSize,
Total: 10,
Count: 1,
Start: tc.StartTime,
Expand Down Expand Up @@ -547,7 +580,7 @@ func TestChunkUploadWritesTimestamp(t *testing.T) {
mockUploadInfoResult(fakebulk, file.Info{
DocID: "bar.foo",
ID: mockUploadID,
ChunkSize: maxFileSize,
ChunkSize: file.MaxChunkSize,
Total: 10,
Count: 1,
Start: time.Now(),
Expand Down Expand Up @@ -597,7 +630,7 @@ func TestUploadCompleteRequiresMatchingAuth(t *testing.T) {
mockInfo := file.Info{
DocID: "bar." + tc.AgentInFileRecord,
ID: mockUploadID,
ChunkSize: maxFileSize,
ChunkSize: file.MaxChunkSize,
Total: 10,
Count: 1,
Start: time.Now().Add(-time.Minute),
Expand Down Expand Up @@ -998,6 +1031,10 @@ func TestUploadCompleteBadRequests(t *testing.T) {

// prepareUploaderMock sets up common dependencies and registers upload routes to a returned router.
// It delegates to configureUploaderMock with a nil maximum file storage size, meaning
// uploads are not size-limited in tests that use this default setup.
func prepareUploaderMock(t *testing.T) (http.Handler, apiServer, *itesting.MockBulk, *MockTransport) {
	return configureUploaderMock(t, nil)
}

func configureUploaderMock(t *testing.T, fileSize *uint64) (http.Handler, apiServer, *itesting.MockBulk, *MockTransport) {
// chunk index operations skip the bulker in order to send binary docs directly
// so a mock *elasticsearch.Client needs to be be prepared
es, tx := mockESClient(t)
Expand Down Expand Up @@ -1034,7 +1071,7 @@ func prepareUploaderMock(t *testing.T) (http.Handler, apiServer, *itesting.MockB
bulker: fakebulk,
chunkClient: es,
cache: c,
uploader: uploader.New(es, fakebulk, c, maxFileSize, maxUploadTimer),
uploader: uploader.New(es, fakebulk, c, fileSize, maxUploadTimer),
authAgent: func(r *http.Request, id *string, bulker bulk.Bulk, c cache.Cache) (*model.Agent, error) {
return &model.Agent{
ESDocument: model.ESDocument{
Expand Down Expand Up @@ -1192,3 +1229,8 @@ func sendBody(body io.Reader) *http.Response {
},
}
}

// size_ptr converts x to uint64 and returns a pointer to the converted value,
// for use as an optional maximum-size setting in test tables.
func size_ptr(x int) *uint64 {
	v := new(uint64)
	*v = uint64(x) //nolint:gosec // disable G115
	return v
}
7 changes: 4 additions & 3 deletions internal/pkg/config/env_defaults.go
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ const (
defaultUploadChunkInterval = time.Millisecond * 3
defaultUploadChunkBurst = 5
defaultUploadChunkMax = 10
defaultUploadChunkMaxBody = 1024 * 1024 * 4 // this is also enforced in handler, a chunk MAY NOT be larger than 4 MiB
defaultUploadChunkMaxBody = 1024 * 1024 * 4 // this is also enforced in handler, a chunk MUST NOT be larger than 4 MiB

defaultFileDelivInterval = time.Millisecond * 100
defaultFileDelivBurst = 5
Expand Down Expand Up @@ -141,8 +141,9 @@ type limit struct {
}

type serverLimitDefaults struct {
PolicyThrottle time.Duration `config:"policy_throttle"` // deprecated: replaced by policy_limit
MaxConnections int `config:"max_connections"`
PolicyThrottle time.Duration `config:"policy_throttle"` // deprecated: replaced by policy_limit
MaxConnections int `config:"max_connections"`
MaxFileStorageByteSize *uint64 `config:"max_file_storage_size"`

ActionLimit limit `config:"action_limit"`
PolicyLimit limit `config:"policy_limit"`
Expand Down
8 changes: 5 additions & 3 deletions internal/pkg/config/limits.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,10 @@ type Limit struct {
}

type ServerLimits struct {
MaxAgents int `config:"max_agents"`
MaxHeaderByteSize int `config:"max_header_byte_size"`
MaxConnections int `config:"max_connections"`
MaxAgents int `config:"max_agents"`
MaxHeaderByteSize int `config:"max_header_byte_size"`
MaxConnections int `config:"max_connections"`
MaxFileStorageByteSize *uint64 `config:"max_file_storage_size"`

ActionLimit Limit `config:"action_limit"`
PolicyLimit Limit `config:"policy_limit"`
Expand Down Expand Up @@ -47,6 +48,7 @@ func (c *ServerLimits) LoadLimits(limits *envLimits) {
if c.MaxConnections == 0 {
c.MaxConnections = l.MaxConnections
}
c.MaxFileStorageByteSize = l.MaxFileStorageByteSize

c.ActionLimit = mergeEnvLimit(c.ActionLimit, l.ActionLimit)
c.PolicyLimit = mergeEnvLimit(c.PolicyLimit, l.PolicyLimit)
Expand Down
4 changes: 2 additions & 2 deletions internal/pkg/file/delivery/delivery.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,13 +25,13 @@ var (
)

type Deliverer struct {
sizeLimit int64
sizeLimit *uint64

client *elasticsearch.Client
bulker bulk.Bulk
}

func New(client *elasticsearch.Client, bulker bulk.Bulk, sizeLimit int64) *Deliverer {
func New(client *elasticsearch.Client, bulker bulk.Bulk, sizeLimit *uint64) *Deliverer {
return &Deliverer{
client: client,
bulker: bulker,
Expand Down
23 changes: 12 additions & 11 deletions internal/pkg/file/delivery/delivery_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ func TestFindFile(t *testing.T) {
},
}, nil)

d := New(nil, fakeBulk, -1)
d := New(nil, fakeBulk, nil)

info, err := d.FindFileForAgent(context.Background(), fileID, agentID)
require.NoError(t, err)
Expand All @@ -92,7 +92,7 @@ func TestFindFileHandlesNoResults(t *testing.T) {
},
}, nil)

d := New(nil, fakeBulk, -1)
d := New(nil, fakeBulk, nil)

_, err := d.FindFileForAgent(context.Background(), "somefile", "anyagent")
assert.ErrorIs(t, ErrNoFile, err)
Expand Down Expand Up @@ -132,7 +132,7 @@ func TestLocateChunks(t *testing.T) {
},
}, nil)

d := New(nil, fakeBulk, -1)
d := New(nil, fakeBulk, nil)

chunks, err := d.LocateChunks(context.Background(), zerolog.Logger{}, baseID)
require.NoError(t, err)
Expand All @@ -154,7 +154,7 @@ func TestLocateChunksEmpty(t *testing.T) {
},
}, nil)

d := New(nil, fakeBulk, -1)
d := New(nil, fakeBulk, nil)

_, err := d.LocateChunks(context.Background(), zerolog.Logger{}, "afile")
assert.Error(t, err)
Expand All @@ -172,7 +172,7 @@ func TestSendFile(t *testing.T) {
}
// Chunk data from a tiny PNG, as a full CBOR document
esMock.Response = sendBodyBytes(hexDecode("bf665f696e64657878212e666c6565742d66696c6564656c69766572792d646174612d656e64706f696e74635f6964654142432e30685f76657273696f6e02675f7365715f6e6f016d5f7072696d6172795f7465726d0165666f756e64f5666669656c6473bf64646174619f586789504e470d0a1a0a0000000d494844520000010000000100010300000066bc3a2500000003504c5445b5d0d0630416ea0000001f494441546881edc1010d000000c2a0f74f6d0e37a00000000000000000be0d210000019a60e1d50000000049454e44ae426082ffffff")) //nolint:bodyclose // nopcloser is used, linter does not see it
d := New(esClient, fakeBulk, -1)
d := New(esClient, fakeBulk, nil)
err := d.SendFile(context.Background(), zerolog.Logger{}, buf, chunks, fileID)
require.NoError(t, err)

Expand Down Expand Up @@ -208,7 +208,7 @@ func TestSendFileMultipleChunks(t *testing.T) {
}
}

d := New(esClient, fakeBulk, -1)
d := New(esClient, fakeBulk, nil)
err := d.SendFile(context.Background(), zerolog.Logger{}, buf, chunks, fileID)
require.NoError(t, err)

Expand Down Expand Up @@ -237,18 +237,19 @@ func TestSendFileMultipleChunksUsesBackingIndex(t *testing.T) {
esMock.RoundTripFn = func(req *http.Request) (*http.Response, error) {
parts := strings.Split(req.URL.Path, "/") // ["", ".fleet-filedelivery-data-endpoint-0001", "_doc", "xyz.1"]

if parts[3] == fileID+".0" {
switch parts[3] {
case fileID + ".0":
assert.Equal(t, idx1, parts[1])
} else if parts[3] == fileID+".1" {
case fileID + ".1":
assert.Equal(t, idx2, parts[1])
} else {
default:
return nil, errors.New("invalid chunk index!")
}

return sendBodyBytes(mockData), nil
}

d := New(esClient, fakeBulk, -1)
d := New(esClient, fakeBulk, nil)
err := d.SendFile(context.Background(), zerolog.Logger{}, buf, chunks, fileID)
require.NoError(t, err)
}
Expand Down Expand Up @@ -306,7 +307,7 @@ func TestSendFileHandlesDisorderedChunks(t *testing.T) {
return sendBodyBytes(sampleDocBody), nil
}

d := New(esClient, fakeBulk, -1)
d := New(esClient, fakeBulk, nil)
err := d.SendFile(context.Background(), zerolog.Logger{}, buf, chunks, fileID)
require.NoError(t, err)
}
Expand Down
Loading