New upstream version 6.1.1+debian

parent c3c6684b
......@@ -7,3 +7,4 @@ testdata/public
/_build
# Avoid distributing redigomock because it is GPLv2
/vendor/github.com/rafaeljusto/redigomock
coverage.html
image: golang:1.8
verify:
image: golang:1.10
script:
- make verify
.test_template: &test_definition
script:
- apt update -qq && apt install -y unzip bzip2
......
......@@ -2,6 +2,18 @@
Formerly known as 'gitlab-git-http-server'.
v 6.1.1
- Allow custom error messages to pass through to Rails !300
v 6.1.0
- Support adding PUT headers for object storage from Rails !297
v 6.0.0
- Accelerate Maven artifact repository uploads !283
v 5.2.0
- Populate Git Protocol !276
......
PREFIX=/usr/local
VERSION=$(shell git describe)-$(shell date -u +%Y%m%d.%H%M%S)
BUILD_DIR = $(shell pwd)
export GOPATH=${BUILD_DIR}/_build
export PATH:=${GOPATH}/bin:${PATH}
GOBUILD=go build -ldflags "-X main.Version=${VERSION}"
PKG=gitlab.com/gitlab-org/gitlab-workhorse
PKG_ALL = $(shell GOPATH=${GOPATH} go list ${PKG}/... | grep -v /vendor/)
EXE_ALL = gitlab-zip-cat gitlab-zip-metadata gitlab-workhorse
all: clean-build $(EXE_ALL)
gitlab-zip-cat: ${BUILD_DIR}/_build/.sync $(shell find cmd/gitlab-zip-cat/ -name '*.go')
${GOBUILD} -o ${BUILD_DIR}/$@ ${PKG}/cmd/$@
gitlab-zip-metadata: ${BUILD_DIR}/_build/.sync $(shell find cmd/gitlab-zip-metadata/ -name '*.go')
${GOBUILD} -o ${BUILD_DIR}/$@ ${PKG}/cmd/$@
gitlab-workhorse: ${BUILD_DIR}/_build/.sync $(shell find . -name '*.go' | grep -v '^\./_')
${GOBUILD} -o ${BUILD_DIR}/$@ ${PKG}
install: gitlab-workhorse gitlab-zip-cat gitlab-zip-metadata
mkdir -p $(DESTDIR)${PREFIX}/bin/
cd ${BUILD_DIR} && install gitlab-workhorse gitlab-zip-cat gitlab-zip-metadata ${DESTDIR}${PREFIX}/bin/
${BUILD_DIR}/_build/.sync:
mkdir -p ${BUILD_DIR}/_build/src/${PKG}
tar -cf - --exclude _build --exclude .git . | (cd ${BUILD_DIR}/_build/src/${PKG} && tar -xf -)
touch $@
.PHONY: test
test: clean-build clean-workhorse govendor prepare-tests
go fmt ${PKG_ALL} | awk '{ print } END { if (NR > 0) { print "Please run go fmt"; exit 1 } }'
_support/detect-context.sh
cd ${GOPATH}/src/${PKG} && govendor sync
cp $(EXE_ALL) ${GOPATH}/src/${PKG}
go test ${PKG_ALL}
@echo SUCCESS
PKG := gitlab.com/gitlab-org/gitlab-workhorse
BUILD_DIR := $(CURDIR)
TARGET_DIR := $(BUILD_DIR)/_build
TARGET_SETUP := $(TARGET_DIR)/.ok
BIN_BUILD_DIR := $(TARGET_DIR)/bin
PKG_BUILD_DIR := $(TARGET_DIR)/src/$(PKG)
COVERAGE_DIR := $(TARGET_DIR)/cover
VERSION := $(shell git describe)-$(shell date -u +%Y%m%d.%H%M%S)
GOBUILD := go build -ldflags "-X main.Version=$(VERSION)"
EXE_ALL := gitlab-zip-cat gitlab-zip-metadata gitlab-workhorse
INSTALL := install
MINIMUM_SUPPORTED_GO_VERSION := 1.8
# Some users may have these variables set in their environment, but doing so could break
# their build process, so unset them
unexport GOROOT
unexport GOBIN
export GOPATH := $(TARGET_DIR)
export PATH := $(GOPATH)/bin:$(PATH)
# Returns a list of all non-vendored (local packages)
LOCAL_PACKAGES = $(shell cd "$(PKG_BUILD_DIR)" && GOPATH=$(GOPATH) go list ./... | grep -v -e '^$(PKG)/vendor/' -e '^$(PKG)/ruby/')
LOCAL_GO_FILES = $(shell find -L $(PKG_BUILD_DIR) -name "*.go" -not -path "$(PKG_BUILD_DIR)/vendor/*" -not -path "$(PKG_BUILD_DIR)/_build/*")
# message(text): echo a "### text" banner so recipe output shows which
# build step is currently running.
define message
@echo "### $(1)"
endef
.PHONY: govendor
govendor:
command -v govendor || go get github.com/kardianos/govendor
.NOTPARALLEL:
coverage:
go test -cover -coverprofile=test.coverage
.PHONY: all
all: clean-build $(EXE_ALL)
$(TARGET_SETUP):
$(call message,"Setting up target directory")
rm -rf $(TARGET_DIR)
mkdir -p "$(dir $(PKG_BUILD_DIR))"
ln -sf ../../../.. "$(PKG_BUILD_DIR)"
mkdir -p "$(BIN_BUILD_DIR)"
touch "$(TARGET_SETUP)"
gitlab-zip-cat: $(TARGET_SETUP) $(shell find cmd/gitlab-zip-cat/ -name '*.go')
$(call message,Building $@)
$(GOBUILD) -o $(BUILD_DIR)/$@ $(PKG)/cmd/$@
gitlab-zip-metadata: $(TARGET_SETUP) $(shell find cmd/gitlab-zip-metadata/ -name '*.go')
$(call message,Building $@)
$(GOBUILD) -o $(BUILD_DIR)/$@ $(PKG)/cmd/$@
gitlab-workhorse: $(TARGET_SETUP) $(shell find . -name '*.go' | grep -v '^\./_')
$(call message,Building $@)
$(GOBUILD) -o $(BUILD_DIR)/$@ $(PKG)
.PHONY: install
install: gitlab-workhorse gitlab-zip-cat gitlab-zip-metadata
$(call message,$@)
mkdir -p $(DESTDIR)$(PREFIX)/bin/
cd $(BUILD_DIR) && $(INSTALL) gitlab-workhorse gitlab-zip-cat gitlab-zip-metadata $(DESTDIR)$(PREFIX)/bin/
.PHONY: test
test: $(TARGET_SETUP) prepare-tests
$(call message,$@)
@go test $(LOCAL_PACKAGES)
@echo SUCCESS
.PHONY: coverage
coverage: $(TARGET_SETUP) prepare-tests
$(call message,$@)
@go test -cover -coverprofile=test.coverage $(LOCAL_PACKAGES)
go tool cover -html=test.coverage -o coverage.html
rm -f test.coverage
fmt:
go fmt ${PKG_ALL}
.PHONY: clean
.PHONY: clean
clean: clean-workhorse clean-build
$(call message,$@)
rm -rf testdata/data testdata/scratch
.PHONY: clean-workhorse
clean-workhorse:
cd ${BUILD_DIR} && rm -f gitlab-workhorse gitlab-zip-cat gitlab-zip-metadata
$(call message,$@)
rm -f $(EXE_ALL)
.PHONY: release
release:
$(call message,$@)
sh _support/release.sh
.PHONY: clean-build
clean-build:
rm -rf ${BUILD_DIR}/_build
$(call message,$@)
rm -rf $(TARGET_DIR)
.PHONY: prepare-tests
prepare-tests: govendor-sync testdata/data/group/test.git $(EXE_ALL)
.PHONY: prepare-tests
prepare-tests: testdata/data/group/test.git $(EXE_ALL)
testdata/data/group/test.git:
$(call message,$@)
git clone --quiet --bare https://gitlab.com/gitlab-org/gitlab-test.git $@
.PHONY: verify
verify: lint vet detect-context check-formatting megacheck
.PHONY: lint
lint: $(TARGET_SETUP) govendor-sync
$(call message,Verify: $@)
@command -v golint || go get -v golang.org/x/lint/golint
@_support/lint.sh $(LOCAL_PACKAGES)
.PHONY: vet
vet: $(TARGET_SETUP) govendor-sync
$(call message,Verify: $@)
@go vet $(LOCAL_PACKAGES)
.PHONY: detect-context
detect-context: $(TARGET_SETUP)
$(call message,Verify: $@)
_support/detect-context.sh
.PHONY: check-formatting
check-formatting: $(TARGET_SETUP) install-goimports
$(call message,Verify: $@)
@_support/validate-formatting.sh $(LOCAL_GO_FILES)
# Megacheck will tailor some responses given a minimum Go version, so pass that through the CLI
# Additionally, megacheck will not return failure exit codes unless explicitly told to via the
# `-simple.exit-non-zero` `-unused.exit-non-zero` and `-staticcheck.exit-non-zero` flags
.PHONY: megacheck
megacheck: $(TARGET_SETUP) govendor-sync
$(call message,Verify: $@)
@command -v megacheck || go get -v honnef.co/go/tools/cmd/megacheck
@megacheck -go $(MINIMUM_SUPPORTED_GO_VERSION) -simple.exit-non-zero -unused.exit-non-zero -staticcheck.exit-non-zero $(LOCAL_PACKAGES)
# Some vendor components, used for testing are GPL, so we don't distribute them
# and need to do a sync before using them
.PHONY: govendor-sync
govendor-sync: $(TARGET_SETUP)
$(call message,$@)
@command -v govendor || go get github.com/kardianos/govendor
@cd $(PKG_BUILD_DIR) && govendor sync
# In addition to fixing imports, goimports also formats your code in the same style as gofmt
# so it can be used as a replacement.
.PHONY: fmt
fmt: $(TARGET_SETUP) install-goimports
$(call message,$@)
@goimports -w -l $(LOCAL_GO_FILES)
.PHONY: goimports
install-goimports: $(TARGET_SETUP)
$(call message,$@)
@command -v goimports || go get -v golang.org/x/tools/cmd/goimports
#!/bin/sh
# Runs golint on the packages given as arguments and fails the build when any
# non-ignored complaint is reported.
# Unfortunately, workhorse fails many lint checks which we currently ignore
LINT_RESULT=$(golint "$@"|grep -Ev 'should have|should be|use ALL_CAPS in Go names')

if [ -n "${LINT_RESULT}" ]; then
  # Fix: the previous message ("Formatting or imports need fixing: 'make fmt'")
  # was copied from validate-formatting.sh; golint findings are NOT repaired by
  # 'make fmt' and require manual code changes.
  echo >&2 "Lint errors found, please fix them:"
  echo ">>${LINT_RESULT}<<"
  exit 1
fi
#!/bin/sh
# Fails the build when goimports reports that any of the given files differ
# from its canonical formatting (goimports also covers gofmt-style issues).
set -eu

# -l lists the offending files instead of rewriting them; -e reports all errors.
files_needing_fix=$(goimports -e -l "$@")

if [ -n "${files_needing_fix}" ]; then
  echo >&2 "Formatting or imports need fixing: 'make fmt'"
  echo "${files_needing_fix}"
  exit 1
fi
......@@ -155,7 +155,7 @@ func TestDownloadCacheHit(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if bytes.Compare(actual, cachedContent) != 0 {
if !bytes.Equal(actual, cachedContent) {
t.Fatal("Unexpected file contents in download")
}
}
......
......@@ -85,6 +85,10 @@ type RemoteObject struct {
DeleteURL string
// StoreURL is the temporary presigned S3 PutObject URL to which upload the first found file
StoreURL string
// Boolean to indicate whether to use headers included in PutHeaders
CustomPutHeaders bool
// PutHeaders are HTTP headers (e.g. Content-Type) to be sent with StoreURL
PutHeaders map[string]string
// ID is a unique identifier of object storage upload
ID string
// Timeout is a number that represents timeout in seconds for sending data to StoreURL
......
......@@ -2,6 +2,7 @@ package badgateway
import (
"bytes"
"context"
"fmt"
"io/ioutil"
"net"
......@@ -12,19 +13,14 @@ import (
"gitlab.com/gitlab-org/gitlab-workhorse/internal/helper"
)
// Values from http.DefaultTransport
var DefaultDialer = &net.Dialer{
// defaultDialer is configured to use the values from http.DefaultTransport
var defaultDialer = &net.Dialer{
Timeout: 30 * time.Second,
KeepAlive: 30 * time.Second,
DualStack: true,
}
var DefaultTransport = &http.Transport{
Proxy: http.ProxyFromEnvironment, // from http.DefaultTransport
Dial: DefaultDialer.Dial, // from http.DefaultTransport
TLSHandshakeTimeout: 10 * time.Second, // from http.DefaultTransport
}
// Custom error for pretty Sentry 'issues'
// Error is a custom error for pretty Sentry 'issues'
type Error struct{ error }
type RoundTripper struct {
......@@ -36,24 +32,34 @@ func TestRoundTripper(backend *url.URL) *RoundTripper {
return NewRoundTripper(backend, "", 0, true)
}
// NewRoundTripper returns a new RoundTripper instance using the provided values
func NewRoundTripper(backend *url.URL, socket string, proxyHeadersTimeout time.Duration, developmentMode bool) *RoundTripper {
tr := *DefaultTransport
// Copied from the definition of http.DefaultTransport. We can't literally copy http.DefaultTransport because of its hidden internal state.
tr := &http.Transport{
Proxy: http.ProxyFromEnvironment,
DialContext: defaultDialer.DialContext,
MaxIdleConns: 100,
IdleConnTimeout: 90 * time.Second,
TLSHandshakeTimeout: 10 * time.Second,
ExpectContinueTimeout: 1 * time.Second,
}
tr.ResponseHeaderTimeout = proxyHeadersTimeout
if backend != nil && socket == "" {
address := mustParseAddress(backend.Host, backend.Scheme)
tr.Dial = func(_, _ string) (net.Conn, error) {
return DefaultDialer.Dial("tcp", address)
tr.DialContext = func(ctx context.Context, network, addr string) (net.Conn, error) {
return defaultDialer.DialContext(ctx, "tcp", address)
}
} else if socket != "" {
tr.Dial = func(_, _ string) (net.Conn, error) {
return DefaultDialer.Dial("unix", socket)
tr.DialContext = func(ctx context.Context, network, addr string) (net.Conn, error) {
return defaultDialer.DialContext(ctx, "unix", socket)
}
} else {
panic("backend is nil and socket is empty")
}
return &RoundTripper{Transport: &tr, developmentMode: developmentMode}
return &RoundTripper{Transport: tr, developmentMode: developmentMode}
}
func mustParseAddress(address, scheme string) string {
......
......@@ -49,7 +49,6 @@ var (
)
type largeBodyError struct{ error }
type watchError struct{ error }
type WatchKeyHandler func(key, value string, timeout time.Duration) (redis.WatchKeyStatus, error)
......
package filestore
import (
"fmt"
"io/ioutil"
"net/http"
"net/url"
"strings"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/helper"
)
// PreAuthorizer performs the pre-authorization call to rails before the
// request body is consumed; BodyUploader uses it to guard every upload.
type PreAuthorizer interface {
	PreAuthorizeHandler(next api.HandleFunc, suffix string) http.Handler
}

// UploadVerifier allows to check an upload before sending it to rails
type UploadVerifier interface {
	// Verify can abort the upload returning an error
	Verify(handler *FileHandler) error
}

// UploadPreparer allows to customize BodyUploader configuration
type UploadPreparer interface {
	// Prepare converts api.Response into a *SaveFileOpts, it can optionally return an UploadVerifier that will be
	// invoked after the real upload, before the finalization with rails
	Prepare(a *api.Response) (*SaveFileOpts, UploadVerifier, error)
}
// defaultPreparer is the fallback UploadPreparer: it derives SaveFileOpts
// straight from the api.Response and requests no post-upload verification.
type defaultPreparer struct{}

// Prepare implements UploadPreparer.
func (s *defaultPreparer) Prepare(a *api.Response) (*SaveFileOpts, UploadVerifier, error) {
	opts := GetOpts(a)
	return opts, nil, nil
}
// BodyUploader is an http.Handler that perform a pre authorization call to rails before hijacking the request body and
// uploading it.
// Providing an UploadPreparer allows to customize the upload process
func BodyUploader(rails PreAuthorizer, h http.Handler, p UploadPreparer) http.Handler {
	preparer := p
	if preparer == nil {
		preparer = &defaultPreparer{}
	}

	handler := func(w http.ResponseWriter, r *http.Request, a *api.Response) {
		opts, verifier, err := preparer.Prepare(a)
		if err != nil {
			helper.Fail500(w, r, fmt.Errorf("BodyUploader: preparation failed: %v", err))
			return
		}

		fh, err := SaveFileFromReader(r.Context(), r.Body, r.ContentLength, opts)
		if err != nil {
			helper.Fail500(w, r, fmt.Errorf("BodyUploader: upload failed: %v", err))
			return
		}

		if verifier != nil {
			if verifyErr := verifier.Verify(fh); verifyErr != nil {
				helper.Fail500(w, r, fmt.Errorf("BodyUploader: verification failed: %v", verifyErr))
				return
			}
		}

		// Collect the finalize fields for rails into a url-encoded form.
		params := url.Values{}
		for field, value := range fh.GitLabFinalizeFields("file") {
			params.Set(field, value)
		}
		encoded := params.Encode()

		// Hijack the body: replace it with the finalize form before proxying.
		r.Body = ioutil.NopCloser(strings.NewReader(encoded))
		r.ContentLength = int64(len(encoded))
		r.Header.Set("Content-Type", "application/x-www-form-urlencoded")

		h.ServeHTTP(w, r)
	}

	return rails.PreAuthorizeHandler(handler, "/authorize")
}
package filestore_test
import (
"fmt"
"io"
"io/ioutil"
"net/http"
"net/http/httptest"
"os"
"strconv"
"strings"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/filestore"
)
// Fixture content shared by all BodyUploader tests below.
const (
	fileContent = "A test file content"
	fileLen     = len(fileContent)
)
// TestBodyUploader covers the happy path with the default preparer: the
// upload is authorized, saved, finalized, and the proxied request carries
// the uploaded file (echoed back to us by echoProxy).
func TestBodyUploader(t *testing.T) {
	body := strings.NewReader(fileContent)

	resp := testUpload(&rails{}, nil, echoProxy(t, fileLen), body)
	require.Equal(t, http.StatusOK, resp.StatusCode)

	uploadEcho, err := ioutil.ReadAll(resp.Body)
	require.NoError(t, err, "Can't read response body")
	require.Equal(t, fileContent, string(uploadEcho))
}
// TestBodyUploaderCustomPreparer checks that a caller-supplied UploadPreparer
// (here one that always stores locally) still results in a successful upload.
func TestBodyUploaderCustomPreparer(t *testing.T) {
	body := strings.NewReader(fileContent)

	resp := testUpload(&rails{}, &alwaysLocalPreparer{}, echoProxy(t, fileLen), body)
	require.Equal(t, http.StatusOK, resp.StatusCode)

	uploadEcho, err := ioutil.ReadAll(resp.Body)
	require.NoError(t, err, "Can't read response body")
	require.Equal(t, fileContent, string(uploadEcho))
}
// TestBodyUploaderCustomVerifier ensures that the UploadVerifier returned by
// the preparer is actually invoked on a successful upload.
func TestBodyUploaderCustomVerifier(t *testing.T) {
	body := strings.NewReader(fileContent)
	verifier := &mockVerifier{}

	resp := testUpload(&rails{}, &alwaysLocalPreparer{verifier: verifier}, echoProxy(t, fileLen), body)
	require.Equal(t, http.StatusOK, resp.StatusCode)

	uploadEcho, err := ioutil.ReadAll(resp.Body)
	require.NoError(t, err, "Can't read response body")
	require.Equal(t, fileContent, string(uploadEcho))

	require.True(t, verifier.invoked, "Verifier.Verify not invoked")
}
// TestBodyUploaderAuthorizationFailure checks that a 401 from the
// pre-authorization endpoint is passed through and the proxy is never hit.
func TestBodyUploaderAuthorizationFailure(t *testing.T) {
	testNoProxyInvocation(t, http.StatusUnauthorized, &rails{unauthorized: true}, nil)
}
// TestBodyUploaderErrors verifies that both preparation and verification
// failures produce 500 Internal Server Error without proxying upstream.
func TestBodyUploaderErrors(t *testing.T) {
	tests := []struct {
		name     string
		preparer *alwaysLocalPreparer
	}{
		{name: "Prepare failure", preparer: &alwaysLocalPreparer{prepareError: fmt.Errorf("")}},
		{name: "Verify failure", preparer: &alwaysLocalPreparer{verifier: &alwaysFailsVerifier{}}},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			testNoProxyInvocation(t, http.StatusInternalServerError, &rails{}, test.preparer)
		})
	}
}
// testNoProxyInvocation asserts that a failing authorization or preparation
// short-circuits the upload: the upstream proxy must never be reached, and
// the response status must equal expectedStatus.
func testNoProxyInvocation(t *testing.T, expectedStatus int, auth filestore.PreAuthorizer, preparer filestore.UploadPreparer) {
	proxy := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		require.Fail(t, "request proxied upstream")
	})

	resp := testUpload(auth, preparer, proxy, nil)
	require.Equal(t, expectedStatus, resp.StatusCode)
}
// testUpload drives BodyUploader end to end: it builds a POST request with
// the given body, runs it through the handler chain, and returns the
// recorded response.
func testUpload(auth filestore.PreAuthorizer, preparer filestore.UploadPreparer, proxy http.Handler, body io.Reader) *http.Response {
	req := httptest.NewRequest("POST", "http://example.com/upload", body)
	w := httptest.NewRecorder()

	filestore.BodyUploader(auth, proxy, preparer).ServeHTTP(w, req)

	return w.Result()
}
// echoProxy returns an http.Handler standing in for the upstream server.
// It asserts that the proxied request is the url-encoded finalize form
// produced by BodyUploader (hash fields, file.path, file.size), then echoes
// the uploaded file's content back so callers can compare it.
func echoProxy(t *testing.T, expectedBodyLength int) http.Handler {
	require := require.New(t)

	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		err := r.ParseForm()
		require.NoError(err)

		require.Equal("application/x-www-form-urlencoded", r.Header.Get("Content-Type"), "Wrong Content-Type header")

		require.Contains(r.PostForm, "file.md5")
		require.Contains(r.PostForm, "file.sha1")
		require.Contains(r.PostForm, "file.sha256")
		require.Contains(r.PostForm, "file.sha512")

		require.Contains(r.PostForm, "file.path")
		require.Contains(r.PostForm, "file.size")
		require.Equal(strconv.Itoa(expectedBodyLength), r.PostFormValue("file.size"))

		path := r.PostFormValue("file.path")
		uploaded, err := os.Open(path)
		require.NoError(err, "File not uploaded")
		// Fix: the opened file was never closed, leaking a descriptor per request.
		defer uploaded.Close()

		// Sending back the file for testing purposes.
		// Fix: the io.Copy error was silently discarded; fail the test instead.
		_, err = io.Copy(w, uploaded)
		require.NoError(err)
	})
}
// rails is a stub filestore.PreAuthorizer. When unauthorized is set it
// rejects every request with 401; otherwise it authorizes an upload into
// os.TempDir().
type rails struct {
	unauthorized bool
}

// PreAuthorizeHandler implements filestore.PreAuthorizer for the tests above.
func (r *rails) PreAuthorizeHandler(next api.HandleFunc, _ string) http.Handler {
	if r.unauthorized {
		// Renamed the request parameter to avoid shadowing the receiver r.
		return http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
			w.WriteHeader(http.StatusUnauthorized)
		})
	}

	authorize := func(w http.ResponseWriter, req *http.Request) {
		next(w, req, &api.Response{TempPath: os.TempDir()})
	}
	return http.HandlerFunc(authorize)
}
// alwaysLocalPreparer is an UploadPreparer stub: it always prepares a local
// upload into os.TempDir(), returning its configured verifier and error.
type alwaysLocalPreparer struct {
	verifier     filestore.UploadVerifier // returned as-is from Prepare (may be nil)
	prepareError error                    // returned as-is from Prepare (may be nil)
}

func (a *alwaysLocalPreparer) Prepare(_ *api.Response) (*filestore.SaveFileOpts, filestore.UploadVerifier, error) {
	return filestore.GetOpts(&api.Response{TempPath: os.TempDir()}), a.verifier, a.prepareError
}
// alwaysFailsVerifier is an UploadVerifier stub whose Verify always errors,
// used to exercise BodyUploader's verification-failure path.
type alwaysFailsVerifier struct{}

func (_ alwaysFailsVerifier) Verify(handler *filestore.FileHandler) error {
	return fmt.Errorf("Verification failed")
}
// mockVerifier is an UploadVerifier stub that records whether Verify ran.
type mockVerifier struct {
	invoked bool // set to true on the first Verify call
}

// Verify marks the verifier as invoked and accepts the upload.
func (m *mockVerifier) Verify(handler *filestore.FileHandler) error {
	m.invoked = true
	return nil
}
......@@ -100,14 +100,14 @@ func SaveFileFromReader(ctx context.Context, reader io.Reader, size int64, opts
}()
if opts.IsMultipart() {
remoteWriter, err = objectstore.NewMultipart(ctx, opts.PresignedParts, opts.PresignedCompleteMultipart, opts.PresignedAbortMultipart, opts.PresignedDelete, opts.Deadline, opts.PartSize)
remoteWriter, err = objectstore.NewMultipart(ctx, opts.PresignedParts, opts.PresignedCompleteMultipart, opts.PresignedAbortMultipart, opts.PresignedDelete, opts.PutHeaders, opts.Deadline, opts.PartSize)
if err != nil {
return nil, err
}
writers = append(writers, remoteWriter)
} else if opts.IsRemote() {
remoteWriter, err = objectstore.NewObject(ctx, opts.PresignedPut, opts.PresignedDelete, opts.Deadline, size)
remoteWriter, err = objectstore.NewObject(ctx, opts.PresignedPut, opts.PresignedDelete, opts.PutHeaders, opts.Deadline, size)
if err != nil {
return nil, err
}
......
......@@ -23,6 +23,9 @@ type SaveFileOpts struct {
PresignedPut string
// PresignedDelete is a presigned S3 DeleteObject compatible URL.
PresignedDelete string
// HTTP headers to be sent along with PUT request
PutHeaders map[string]string
// Deadline it the S3 operation deadline, the upload will be aborted if not completed in time
Deadline time.Time
......@@ -65,9 +68,17 @@ func GetOpts(apiResponse *api.Response) *SaveFileOpts {
RemoteURL: apiResponse.RemoteObject.GetURL,
PresignedPut: apiResponse.RemoteObject.StoreURL,
PresignedDelete: apiResponse.RemoteObject.DeleteURL,
PutHeaders: apiResponse.RemoteObject.PutHeaders,
Deadline: time.Now().Add(timeout),
}
// Backwards compatibility to ensure API servers that do not include the
// CustomPutHeaders flag will default to the original content type.
if !apiResponse.RemoteObject.CustomPutHeaders {
opts.PutHeaders = make(map[string]string)
opts.PutHeaders["Content-Type"] = "application/octet-stream"
}
if multiParams := apiResponse.RemoteObject.MultipartUpload; multiParams != nil {
opts.PartSize = multiParams.PartSize
opts.PresignedCompleteMultipart = multiParams.CompleteURL
......
......@@ -76,8 +76,10 @@ func TestSaveFileOptsLocalAndRemote(t *testing.T) {
func TestGetOpts(t *testing.T) {
tests := []struct {
name string
multipart *api.MultipartUploadParams
name string
multipart *api.MultipartUploadParams
customPutHeaders bool
putHeaders map[string]string
}{
{
name: "Single upload",
......@@ -90,6 +92,21 @@ func TestGetOpts(t *testing.T) {
PartURLs: []string{"http://part1", "http://part2"},
},
},
{
name: "Single upload with custom content type",
customPutHeaders: true,
putHeaders: map[string]string{"Content-Type": "image/jpeg"},
}, {
name: "Multipart upload with custom content type",
multipart: &api.MultipartUploadParams{
PartSize: 10,
CompleteURL: "http://complete",
AbortURL: "http://abort",
PartURLs: []string{"http://part1", "http://part2"},
},
customPutHeaders: true,
putHeaders: map[string]string{"Content-Type": "image/jpeg"},
},
}
for _, test := range tests {
......@@ -99,12 +116,14 @@ func TestGetOpts(t *testing.T) {
apiResponse := &api.Response{
TempPath: "/tmp",
RemoteObject: api.RemoteObject{
Timeout: 10,
ID: "id",
GetURL: "http://get",
StoreURL: "http://store",
DeleteURL: "http://delete",
MultipartUpload: test.multipart,
Timeout: 10,
ID: "id",