reduce logfile upload limit

Brad Rydzewski 2017-09-12 09:03:32 -07:00
parent 1e64f61009
commit 8795cf039f
4 changed files with 104 additions and 49 deletions

@@ -100,9 +100,12 @@ func loop(c *cli.Context) error {
 	return nil
 }
 
+// NOTE we need to limit the size of the logs and files that we upload.
+// The maximum grpc payload size is 4194304. So until we implement streaming
+// for uploads, we need to set these limits below the maximum.
 const (
-	maxFileUpload = 5000000
-	maxLogsUpload = 5000000
+	maxLogsUpload = 2000000 // this is per step
+	maxFileUpload = 1000000
 )
 
 func run(ctx context.Context, client rpc.Peer, filter rpc.Filter) error {
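
For context: gRPC's default maximum message size is 4194304 bytes (4 MiB), so each Upload or Log payload has to stay well below that until streaming uploads exist. Below is a minimal sketch of how an agent might cap a payload before handing it to the RPC client; the truncate helper and the sample sizes are illustrative, not the project's actual code.

package main

import "fmt"

const (
	grpcMaxPayload = 4194304 // default gRPC maximum message size in bytes
	maxLogsUpload  = 2000000 // per-step cap, kept well below grpcMaxPayload
	maxFileUpload  = 1000000 // cap for uploaded artifact files
)

// truncate returns at most limit bytes of data so a single RPC payload
// never approaches the gRPC message-size ceiling.
func truncate(data []byte, limit int) []byte {
	if len(data) <= limit {
		return data
	}
	return data[:limit]
}

func main() {
	logs := make([]byte, 3000000) // stand-in for a step's combined log output
	capped := truncate(logs, maxLogsUpload)
	fmt.Printf("uploading %d of %d bytes (limit %d, grpc max %d)\n",
		len(capped), len(logs), maxLogsUpload, grpcMaxPayload)
}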

@@ -96,18 +96,6 @@ func setupCache(h http.Handler) http.Handler {
 	)
 }
 
-func resetCache(h http.Handler) http.Handler {
-	return http.HandlerFunc(
-		func(w http.ResponseWriter, r *http.Request) {
-			w.Header().Del("Cache-Control")
-			w.Header().Del("Last-Updated")
-			w.Header().Del("Expires")
-			w.Header().Del("ETag")
-			h.ServeHTTP(w, r)
-		},
-	)
-}
-
 // WithUser returns a context with the current authenticated user.
 func WithUser(c context.Context, user *model.User) context.Context {
 	return context.WithValue(c, userKey, user)
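
As an aside, WithUser follows the standard context-value pattern: the user is stored under an unexported key so only this package can read it back. A hedged sketch of both sides of that pattern follows; the key type, the FromContext name, and the local User struct are illustrative stand-ins, not the project's actual identifiers.

package session

import "context"

// User is a stand-in for the project's model.User type.
type User struct {
	Login string
}

// ctxKey is unexported so no other package can collide with this value.
type ctxKey int

const userKey ctxKey = 0

// WithUser returns a context carrying the authenticated user.
func WithUser(c context.Context, user *User) context.Context {
	return context.WithValue(c, userKey, user)
}

// FromContext returns the user stored by WithUser; ok is false when the
// request was not authenticated.
func FromContext(c context.Context) (user *User, ok bool) {
	user, ok = c.Value(userKey).(*User)
	return
}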

@@ -47,7 +47,15 @@ func (c *client) Next(ctx context.Context, f Filter) (*Pipeline, error) {
 		if err == nil {
 			break
 		}
-		if grpc.Code(err) == codes.Unknown {
+		switch grpc.Code(err) {
+		case
+			codes.Aborted,
+			codes.DataLoss,
+			codes.DeadlineExceeded,
+			codes.Internal,
+			codes.Unavailable:
+			// non-fatal errors
+		default:
 			return nil, err
 		}
 		if ctx.Err() != nil {
@@ -77,7 +85,15 @@ func (c *client) Wait(ctx context.Context, id string) (err error) {
 		if err == nil {
 			break
 		}
-		if grpc.Code(err) == codes.Unknown {
+		switch grpc.Code(err) {
+		case
+			codes.Aborted,
+			codes.DataLoss,
+			codes.DeadlineExceeded,
+			codes.Internal,
+			codes.Unavailable:
+			// non-fatal errors
+		default:
 			return err
 		}
 		<-time.After(backoff)
@@ -101,7 +117,15 @@ func (c *client) Init(ctx context.Context, id string, state State) (err error) {
 		if err == nil {
 			break
 		}
-		if grpc.Code(err) == codes.Unknown {
+		switch grpc.Code(err) {
+		case
+			codes.Aborted,
+			codes.DataLoss,
+			codes.DeadlineExceeded,
+			codes.Internal,
+			codes.Unavailable:
+			// non-fatal errors
+		default:
 			return err
 		}
 		<-time.After(backoff)
@@ -125,7 +149,15 @@ func (c *client) Done(ctx context.Context, id string, state State) (err error) {
 		if err == nil {
 			break
 		}
-		if grpc.Code(err) == codes.Unknown {
+		switch grpc.Code(err) {
+		case
+			codes.Aborted,
+			codes.DataLoss,
+			codes.DeadlineExceeded,
+			codes.Internal,
+			codes.Unavailable:
+			// non-fatal errors
+		default:
 			return err
 		}
 		<-time.After(backoff)
@@ -142,7 +174,15 @@ func (c *client) Extend(ctx context.Context, id string) (err error) {
 		if err == nil {
 			break
 		}
-		if grpc.Code(err) == codes.Unknown {
+		switch grpc.Code(err) {
+		case
+			codes.Aborted,
+			codes.DataLoss,
+			codes.DeadlineExceeded,
+			codes.Internal,
+			codes.Unavailable:
+			// non-fatal errors
+		default:
 			return err
 		}
 		<-time.After(backoff)
@@ -166,7 +206,15 @@ func (c *client) Update(ctx context.Context, id string, state State) (err error)
 		if err == nil {
 			break
 		}
-		if grpc.Code(err) == codes.Unknown {
+		switch grpc.Code(err) {
+		case
+			codes.Aborted,
+			codes.DataLoss,
+			codes.DeadlineExceeded,
+			codes.Internal,
+			codes.Unavailable:
+			// non-fatal errors
+		default:
 			return err
 		}
 		<-time.After(backoff)
@@ -191,7 +239,15 @@ func (c *client) Upload(ctx context.Context, id string, file *File) (err error)
 		if err == nil {
 			break
 		}
-		if grpc.Code(err) == codes.Unknown {
+		switch grpc.Code(err) {
+		case
+			codes.Aborted,
+			codes.DataLoss,
+			codes.DeadlineExceeded,
+			codes.Internal,
+			codes.Unavailable:
+			// non-fatal errors
+		default:
 			return err
 		}
 		<-time.After(backoff)
@@ -213,7 +269,15 @@ func (c *client) Log(ctx context.Context, id string, line *Line) (err error) {
 		if err == nil {
 			break
 		}
-		if grpc.Code(err) == codes.Unknown {
+		switch grpc.Code(err) {
+		case
+			codes.Aborted,
+			codes.DataLoss,
+			codes.DeadlineExceeded,
+			codes.Internal,
+			codes.Unavailable:
+			// non-fatal errors
+		default:
 			return err
 		}
 		<-time.After(backoff)
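
Every hunk in this file applies the same change: instead of giving up only on codes.Unknown, the client now keeps retrying on a whitelist of transient gRPC status codes and returns immediately on anything else. A compact sketch of that classification as a reusable helper, using the grpc.Code accessor that this vendored code relies on; the isTransient and retry names and the fixed backoff are illustrative, not part of the commit.

package rpc

import (
	"context"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/codes"
)

// isTransient reports whether a gRPC error is worth retrying. The set of
// codes mirrors the switch statement added in this commit.
func isTransient(err error) bool {
	switch grpc.Code(err) {
	case codes.Aborted,
		codes.DataLoss,
		codes.DeadlineExceeded,
		codes.Internal,
		codes.Unavailable:
		return true
	default:
		return false
	}
}

// retry runs fn until it succeeds, fails with a fatal error, or ctx is
// canceled, sleeping a fixed backoff between attempts.
func retry(ctx context.Context, backoff time.Duration, fn func() error) error {
	for {
		err := fn()
		if err == nil {
			return nil
		}
		if !isTransient(err) {
			return err
		}
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-time.After(backoff):
		}
	}
}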

vendor/vendor.json

@@ -39,80 +39,80 @@
 	{
 		"checksumSHA1": "W3AuK8ocqHwlUajGmQLFvnRhTZE=",
 		"path": "github.com/cncd/pipeline/pipeline",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
 		"checksumSHA1": "gc+efbEPGdecp6I2ezd6J3+UL3o=",
 		"path": "github.com/cncd/pipeline/pipeline/backend",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
 		"checksumSHA1": "IIuOItGMhYP6kLtlZWYBO+liSx4=",
 		"path": "github.com/cncd/pipeline/pipeline/backend/docker",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
 		"checksumSHA1": "2A3+CnkMfvvO4oRkjQKqi44no0g=",
 		"path": "github.com/cncd/pipeline/pipeline/frontend",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
 		"checksumSHA1": "wvnLS1c0wKmcDLBsjQO4Dv6iCWA=",
 		"path": "github.com/cncd/pipeline/pipeline/frontend/yaml",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
 		"checksumSHA1": "hx0Ok9hti6N9qsJB1sJ05HsMHSM=",
 		"path": "github.com/cncd/pipeline/pipeline/frontend/yaml/compiler",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
 		"checksumSHA1": "cIbHWz3UTpPjiwfDVJgkQ3BYuD4=",
 		"path": "github.com/cncd/pipeline/pipeline/frontend/yaml/linter",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
 		"checksumSHA1": "kx2sPUIMozPC/g6E4w48h3FfH3k=",
 		"path": "github.com/cncd/pipeline/pipeline/frontend/yaml/matrix",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
 		"checksumSHA1": "L7Q5qJmPITNmvFEEaj5MPwCWFRk=",
 		"path": "github.com/cncd/pipeline/pipeline/frontend/yaml/types",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
 		"checksumSHA1": "2/3f3oNmxXy5kcrRLCFa24Oc9O4=",
 		"path": "github.com/cncd/pipeline/pipeline/interrupt",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
 		"checksumSHA1": "uOjTfke7Qxosrivgz/nVTHeIP5g=",
 		"path": "github.com/cncd/pipeline/pipeline/multipart",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
-		"checksumSHA1": "t2HtsL+IUCVkjvHk99Jn1Fj8AXU=",
+		"checksumSHA1": "pyaqbQRdFkOGTE0mSNou27ikvfs=",
 		"path": "github.com/cncd/pipeline/pipeline/rpc",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
 		"checksumSHA1": "huYd+DhpBP/0kHMAC0mPZAZBmnw=",
 		"path": "github.com/cncd/pipeline/pipeline/rpc/proto",
-		"revision": "84197192b579964d3caaaa0ec2328ebfe7cd19cc",
-		"revisionTime": "2017-08-29T22:35:31Z"
+		"revision": "0a10375aa44bfebeeaffd0b5010ff9cb01893e2d",
+		"revisionTime": "2017-09-12T15:56:23Z"
 	},
 	{
 		"checksumSHA1": "7Qj1DK0ceAXkYztW0l3+L6sn+V8=",