refactor: add more linters and fix findings (#78)

Robert Kaussow authored on 2023-02-08 10:14:25 +01:00, committed by GitHub
parent 8f561ae1a0
commit d2730125d8
10 changed files with 264 additions and 98 deletions


@@ -8,7 +8,7 @@ platform:
 steps:
 - name: deps
-  image: golang:1.19
+  image: golang:1.20
   commands:
   - make deps
   volumes:
@@ -16,7 +16,7 @@ steps:
     path: /go
 - name: lint
-  image: golang:1.19
+  image: golang:1.20
   commands:
   - make lint
   volumes:
@@ -24,7 +24,7 @@ steps:
     path: /go
 - name: test
-  image: golang:1.19
+  image: golang:1.20
   commands:
   - make test
   volumes:
@@ -51,7 +51,7 @@ platform:
 steps:
 - name: build
-  image: techknowlogick/xgo:go-1.19.x
+  image: techknowlogick/xgo:go-1.20.x
   commands:
   - ln -s /drone/src /source
   - make release
@@ -292,6 +292,6 @@ depends_on:
 ---
 kind: signature
-hmac: 944cf1fa4e35a0f1a4634335ec63ed97dd0f9059d05311fd2211595a7f7626b7
+hmac: 915f60726a0be195ae835611939da462c8ce7c6c0ae17f0d6de00750197ba751
 ...


@@ -1,25 +1,92 @@
 linters:
-  enable:
-    - gosimple
-    - deadcode
-    - typecheck
-    - govet
-    - errcheck
-    - staticcheck
-    - unused
-    - structcheck
-    - varcheck
-    - dupl
-    - gofmt
-    - misspell
-    - gocritic
-    - bidichk
-    - ineffassign
-    - revive
-    - gofumpt
-    - depguard
   enable-all: false
   disable-all: true
+  enable:
+    - errcheck
+    - gosimple
+    - govet
+    - ineffassign
+    - staticcheck
+    - typecheck
+    - unused
+    - asasalint
+    - asciicheck
+    - bidichk
+    - bodyclose
+    - containedctx
+    - contextcheck
+    - decorder
+    - depguard
+    - dogsled
+    - dupl
+    - dupword
+    - durationcheck
+    - errchkjson
+    - errname
+    - errorlint
+    - execinquery
+    - exhaustive
+    - exportloopref
+    - forcetypeassert
+    - ginkgolinter
+    - gocheckcompilerdirectives
+    - gochecknoglobals
+    - gochecknoinits
+    - gocognit
+    - goconst
+    - gocritic
+    - gocyclo
+    - godot
+    - godox
+    - goerr113
+    - gofmt
+    - gofumpt
+    - goheader
+    - goimports
+    - gomnd
+    - gomoddirectives
+    - gomodguard
+    - goprintffuncname
+    - gosec
+    - grouper
+    - importas
+    - interfacebloat
+    - ireturn
+    - lll
+    - loggercheck
+    - maintidx
+    - makezero
+    - misspell
+    - musttag
+    - nakedret
+    - nestif
+    - nilerr
+    - nilnil
+    - nlreturn
+    - noctx
+    - nolintlint
+    - nonamedreturns
+    - nosprintfhostport
+    - prealloc
+    - predeclared
+    - promlinter
+    - reassign
+    - revive
+    # - rowserrcheck
+    # - sqlclosecheck
+    # - structcheck
+    - stylecheck
+    - tagliatelle
+    - tenv
+    - testableexamples
+    - thelper
+    - tparallel
+    - unconvert
+    - unparam
+    - usestdlibvars
+    # - wastedassign
+    - whitespace
+    - wsl
   fast: false

 run:
@@ -28,4 +95,4 @@ run:
 linters-settings:
   gofumpt:
     extra-rules: true
-    lang-version: "1.18"
+    lang-version: "1.20"
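
Note: most of the findings fixed in this commit come from the newly enabled linters such as gomnd (magic numbers), wsl (statement spacing), forcetypeassert, and goerr113 (dynamic errors). For readers unfamiliar with those checks, here is a minimal, hypothetical Go sketch of the kind of code they reject and one fixed form; the names and the port example are illustrative only and not taken from this repository.

package main

import (
	"errors"
	"fmt"
)

// ErrInvalidPort is a sentinel error; goerr113 prefers wrapping a sentinel
// over creating ad-hoc errors at the call site.
var ErrInvalidPort = errors.New("invalid port")

// defaultPort gives the magic number a name, which satisfies gomnd;
// alternatively a //nolint:gomnd directive can be placed on the literal.
const defaultPort = 8080

func checkPort(port int) error {
	if port <= 0 {
		return fmt.Errorf("%w: %d", ErrInvalidPort, port)
	}

	return nil
}

func main() {
	// wsl wants assignments and the following if block separated from
	// unrelated statements by blank lines, as done here.
	port := defaultPort

	if err := checkPort(port); err != nil {
		fmt.Println(err)
	}
}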


@@ -19,7 +19,7 @@ GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@$(G
 XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest
 GENERATE ?=
-XGO_VERSION := go-1.19.x
+XGO_VERSION := go-1.20.x
 XGO_TARGETS ?= linux/amd64,linux/arm-6,linux/arm-7,linux/arm64
 TARGETOS ?= linux


@@ -136,6 +136,7 @@ func settingsFlags(settings *plugin.Settings, category string) []cli.Flag {
         &cli.IntFlag{
             Name:  "max-concurrency",
             Usage: "customize number concurrent files to process",
+            //nolint:gomnd
             Value:       100,
             EnvVars:     []string{"PLUGIN_MAX_CONCURRENCY"},
             Destination: &settings.MaxConcurrency,


@@ -1,6 +1,7 @@
 package main

 import (
+    "errors"
     "fmt"
     "os"
@@ -11,11 +12,14 @@ import (
     "github.com/urfave/cli/v2"
 )

+//nolint:gochecknoglobals
 var (
     BuildVersion = "devel"
     BuildDate    = "00000000"
 )

+var ErrTypeAssertionFailed = errors.New("type assertion failed")
+
 func main() {
     settings := &plugin.Settings{}
@@ -44,12 +48,42 @@ func run(settings *plugin.Settings) cli.ActionFunc {
     return func(ctx *cli.Context) error {
         urfave.LoggingFromContext(ctx)

-        settings.ACL = ctx.Generic("acl").(*StringMapFlag).Get()
-        settings.CacheControl = ctx.Generic("cache-control").(*StringMapFlag).Get()
-        settings.ContentType = ctx.Generic("content-type").(*StringMapFlag).Get()
-        settings.ContentEncoding = ctx.Generic("content-encoding").(*StringMapFlag).Get()
-        settings.Metadata = ctx.Generic("metadata").(*DeepStringMapFlag).Get()
-        settings.Redirects = ctx.Generic("redirects").(*MapFlag).Get()
+        acl, ok := ctx.Generic("acl").(*StringMapFlag)
+        if !ok {
+            return fmt.Errorf("%w: failed to read acl input", ErrTypeAssertionFailed)
+        }
+
+        cacheControl, ok := ctx.Generic("cache-control").(*StringMapFlag)
+        if !ok {
+            return fmt.Errorf("%w: failed to read cache-control input", ErrTypeAssertionFailed)
+        }
+
+        contentType, ok := ctx.Generic("content-type").(*StringMapFlag)
+        if !ok {
+            return fmt.Errorf("%w: failed to read content-type input", ErrTypeAssertionFailed)
+        }
+
+        contentEncoding, ok := ctx.Generic("content-encoding").(*StringMapFlag)
+        if !ok {
+            return fmt.Errorf("%w: failed to read content-encoding input", ErrTypeAssertionFailed)
+        }
+
+        metadata, ok := ctx.Generic("metadata").(*DeepStringMapFlag)
+        if !ok {
+            return fmt.Errorf("%w: failed to read metadata input", ErrTypeAssertionFailed)
+        }
+
+        redirects, ok := ctx.Generic("redirects").(*MapFlag)
+        if !ok {
+            return fmt.Errorf("%w: failed to read redirects input", ErrTypeAssertionFailed)
+        }
+
+        settings.ACL = acl.Get()
+        settings.CacheControl = cacheControl.Get()
+        settings.ContentType = contentType.Get()
+        settings.ContentEncoding = contentEncoding.Get()
+        settings.Metadata = metadata.Get()
+        settings.Redirects = redirects.Get()

         plugin := plugin.New(
             *settings,
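
Note: the main.go change above replaces unchecked type assertions (flagged by forcetypeassert) with the comma-ok form and a wrapped sentinel error. A self-contained sketch of the same pattern, using only the standard library and hypothetical names, is shown below; it is an illustration of the technique, not the plugin's code.

package main

import (
	"errors"
	"fmt"
)

var ErrTypeAssertionFailed = errors.New("type assertion failed")

// asString uses the comma-ok assertion: instead of v.(string) panicking on
// a mismatch, the failure becomes a wrapped, testable error.
func asString(v interface{}) (string, error) {
	s, ok := v.(string)
	if !ok {
		return "", fmt.Errorf("%w: expected string, got %T", ErrTypeAssertionFailed, v)
	}

	return s, nil
}

func main() {
	if _, err := asString(42); errors.Is(err, ErrTypeAssertionFailed) {
		fmt.Println(err)
	}
}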


@@ -18,9 +18,11 @@ func (d *DeepStringMapFlag) Get() map[string]map[string]string {
 func (d *DeepStringMapFlag) Set(value string) error {
     d.parts = map[string]map[string]string{}
     err := json.Unmarshal([]byte(value), &d.parts)
     if err != nil {
         single := map[string]string{}
         err := json.Unmarshal([]byte(value), &single)
         if err != nil {
             return err
@@ -46,10 +48,12 @@ func (s *StringMapFlag) Get() map[string]string {
 func (s *StringMapFlag) Set(value string) error {
     s.parts = map[string]string{}
     err := json.Unmarshal([]byte(value), &s.parts)
     if err != nil {
         s.parts["*"] = value
     }
     return nil
 }
@@ -67,5 +71,6 @@ func (m *MapFlag) Get() map[string]string {
 func (m *MapFlag) Set(value string) error {
     m.parts = map[string]string{}
     return json.Unmarshal([]byte(value), &m.parts)
 }
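
Note: the hunks above only adjust formatting; the flag types keep their behavior of first trying to parse the value as a JSON map and, for StringMapFlag, falling back to a wildcard entry for plain strings. A standalone sketch of that fallback idea, with a hypothetical type name, is shown below.

package main

import (
	"encoding/json"
	"fmt"
)

// StringMap accepts either a JSON object ({"*.html": "text/html"}) or a
// plain string, which is stored under the "*" wildcard key.
type StringMap struct {
	parts map[string]string
}

func (s *StringMap) Set(value string) error {
	s.parts = map[string]string{}

	if err := json.Unmarshal([]byte(value), &s.parts); err != nil {
		// Not a JSON object: treat the whole value as a catch-all entry.
		s.parts = map[string]string{"*": value}
	}

	return nil
}

func (s *StringMap) Get() map[string]string { return s.parts }

func main() {
	var m StringMap

	_ = m.Set(`{"*.css": "text/css"}`)
	fmt.Println(m.Get())

	_ = m.Set("public-read")
	fmt.Println(m.Get())
}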

go.mod

@@ -1,6 +1,6 @@
 module github.com/thegeeklab/drone-s3-sync

-go 1.19
+go 1.20

 require (
     github.com/aws/aws-sdk-go v1.44.192


@@ -1,7 +1,9 @@
 package plugin

 import (
+    //nolint:gosec
     "crypto/md5"
+    "errors"
     "fmt"
     "io"
     "mime"
@@ -28,20 +30,20 @@ type AWS struct {
     plugin   *Plugin
 }

-func NewAWS(p *Plugin) AWS {
+func NewAWS(plugin *Plugin) AWS {
     sessCfg := &aws.Config{
-        S3ForcePathStyle: aws.Bool(p.settings.PathStyle),
-        Region:           aws.String(p.settings.Region),
+        S3ForcePathStyle: aws.Bool(plugin.settings.PathStyle),
+        Region:           aws.String(plugin.settings.Region),
     }

-    if p.settings.Endpoint != "" {
-        sessCfg.Endpoint = &p.settings.Endpoint
-        sessCfg.DisableSSL = aws.Bool(strings.HasPrefix(p.settings.Endpoint, "http://"))
+    if plugin.settings.Endpoint != "" {
+        sessCfg.Endpoint = &plugin.settings.Endpoint
+        sessCfg.DisableSSL = aws.Bool(strings.HasPrefix(plugin.settings.Endpoint, "http://"))
     }

     // allowing to use the instance role or provide a key and secret
-    if p.settings.AccessKey != "" && p.settings.SecretKey != "" {
-        sessCfg.Credentials = credentials.NewStaticCredentials(p.settings.AccessKey, p.settings.SecretKey, "")
+    if plugin.settings.AccessKey != "" && plugin.settings.SecretKey != "" {
+        sessCfg.Credentials = credentials.NewStaticCredentials(plugin.settings.AccessKey, plugin.settings.SecretKey, "")
     }

     sess, _ := session.NewSession(sessCfg)
@@ -51,11 +53,13 @@ func NewAWS(p *Plugin) AWS {
     r := make([]string, 1)
     l := make([]string, 1)

-    return AWS{c, cf, r, l, p}
+    return AWS{c, cf, r, l, plugin}
 }

+//nolint:gocognit,gocyclo,maintidx
 func (a *AWS) Upload(local, remote string) error {
-    p := a.plugin
+    plugin := a.plugin
+
     if local == "" {
         return nil
     }
@@ -68,9 +72,11 @@ func (a *AWS) Upload(local, remote string) error {
     defer file.Close()

     var acl string
-    for pattern := range p.settings.ACL {
+
+    for pattern := range plugin.settings.ACL {
         if match := glob.Glob(pattern, local); match {
-            acl = p.settings.ACL[pattern]
+            acl = plugin.settings.ACL[pattern]
+
             break
         }
     }
@@ -82,9 +88,11 @@ func (a *AWS) Upload(local, remote string) error {
     fileExt := filepath.Ext(local)

     var contentType string
-    for patternExt := range p.settings.ContentType {
+
+    for patternExt := range plugin.settings.ContentType {
         if patternExt == fileExt {
-            contentType = p.settings.ContentType[patternExt]
+            contentType = plugin.settings.ContentType[patternExt]
+
             break
         }
     }
@@ -94,43 +102,58 @@ func (a *AWS) Upload(local, remote string) error {
     }

     var contentEncoding string
-    for patternExt := range p.settings.ContentEncoding {
+
+    for patternExt := range plugin.settings.ContentEncoding {
         if patternExt == fileExt {
-            contentEncoding = p.settings.ContentEncoding[patternExt]
+            contentEncoding = plugin.settings.ContentEncoding[patternExt]
+
             break
         }
     }

     var cacheControl string
-    for pattern := range p.settings.CacheControl {
+
+    for pattern := range plugin.settings.CacheControl {
         if match := glob.Glob(pattern, local); match {
-            cacheControl = p.settings.CacheControl[pattern]
+            cacheControl = plugin.settings.CacheControl[pattern]
+
             break
         }
     }

     metadata := map[string]*string{}
-    for pattern := range p.settings.Metadata {
+
+    for pattern := range plugin.settings.Metadata {
         if match := glob.Glob(pattern, local); match {
-            for k, v := range p.settings.Metadata[pattern] {
+            for k, v := range plugin.settings.Metadata[pattern] {
                 metadata[k] = aws.String(v)
             }
+
             break
         }
     }

+    var AWSErr awserr.Error
+
     head, err := a.client.HeadObject(&s3.HeadObjectInput{
-        Bucket: aws.String(p.settings.Bucket),
+        Bucket: aws.String(plugin.settings.Bucket),
         Key:    aws.String(remote),
     })
-    if err != nil && err.(awserr.Error).Code() != "404" {
+    if err != nil && errors.As(err, &AWSErr) {
+        //nolint:errorlint,forcetypeassert
         if err.(awserr.Error).Code() == "404" {
             return err
         }
-        logrus.Debugf("'%s' not found in bucket, uploading with content-type '%s' and permissions '%s'", local, contentType, acl)
+
+        logrus.Debugf(
+            "'%s' not found in bucket, uploading with content-type '%s' and permissions '%s'",
+            local,
+            contentType,
+            acl,
+        )
+
         putObject := &s3.PutObjectInput{
-            Bucket:      aws.String(p.settings.Bucket),
+            Bucket:      aws.String(plugin.settings.Bucket),
             Key:         aws.String(remote),
             Body:        file,
             ContentType: aws.String(contentType),
@@ -152,48 +175,58 @@ func (a *AWS) Upload(local, remote string) error {
         }

         _, err = a.client.PutObject(putObject)
         return err
     }

+    //nolint:gosec
     hash := md5.New()
     _, _ = io.Copy(hash, file)
     sum := fmt.Sprintf("'%x'", hash.Sum(nil))

+    //nolint:nestif
     if sum == *head.ETag {
         shouldCopy := false

         if head.ContentType == nil && contentType != "" {
             logrus.Debugf("content-type has changed from unset to %s", contentType)
             shouldCopy = true
         }

         if !shouldCopy && head.ContentType != nil && contentType != *head.ContentType {
             logrus.Debugf("content-type has changed from %s to %s", *head.ContentType, contentType)
             shouldCopy = true
         }

         if !shouldCopy && head.ContentEncoding == nil && contentEncoding != "" {
             logrus.Debugf("Content-Encoding has changed from unset to %s", contentEncoding)
             shouldCopy = true
         }

         if !shouldCopy && head.ContentEncoding != nil && contentEncoding != *head.ContentEncoding {
             logrus.Debugf("Content-Encoding has changed from %s to %s", *head.ContentEncoding, contentEncoding)
             shouldCopy = true
         }

         if !shouldCopy && head.CacheControl == nil && cacheControl != "" {
             logrus.Debugf("cache-control has changed from unset to %s", cacheControl)
             shouldCopy = true
         }

         if !shouldCopy && head.CacheControl != nil && cacheControl != *head.CacheControl {
             logrus.Debugf("cache-control has changed from %s to %s", *head.CacheControl, cacheControl)
             shouldCopy = true
         }

         if !shouldCopy && len(head.Metadata) != len(metadata) {
             logrus.Debugf("count of metadata values has changed for %s", local)
             shouldCopy = true
         }
@@ -202,7 +235,9 @@ func (a *AWS) Upload(local, remote string) error {
             if hv, ok := head.Metadata[k]; ok {
                 if *v != *hv {
                     logrus.Debugf("metadata values have changed for %s", local)
                     shouldCopy = true
                     break
                 }
             }
@@ -211,7 +246,7 @@ func (a *AWS) Upload(local, remote string) error {
         if !shouldCopy {
             grant, err := a.client.GetObjectAcl(&s3.GetObjectAclInput{
-                Bucket: aws.String(p.settings.Bucket),
+                Bucket: aws.String(plugin.settings.Bucket),
                 Key:    aws.String(remote),
             })
             if err != nil {
@@ -219,18 +254,20 @@ func (a *AWS) Upload(local, remote string) error {
             }

             previousACL := "private"
-            for _, g := range grant.Grants {
-                gt := *g.Grantee
-                if gt.URI != nil {
-                    if *gt.URI == "http://acs.amazonaws.com/groups/global/AllUsers" {
-                        if *g.Permission == "READ" {
+
+            for _, grant := range grant.Grants {
+                grantee := *grant.Grantee
+                if grantee.URI != nil {
+                    if *grantee.URI == "http://acs.amazonaws.com/groups/global/AllUsers" {
+                        if *grant.Permission == "READ" {
                             previousACL = "public-read"
-                        } else if *g.Permission == "WRITE" {
+                        } else if *grant.Permission == "WRITE" {
                             previousACL = "public-read-write"
                         }
                     }
-                    if *gt.URI == "http://acs.amazonaws.com/groups/global/AuthenticatedUsers" {
-                        if *g.Permission == "READ" {
+
+                    if *grantee.URI == "http://acs.amazonaws.com/groups/global/AuthenticatedUsers" {
+                        if *grant.Permission == "READ" {
                             previousACL = "authenticated-read"
                         }
                     }
@@ -239,20 +276,23 @@ func (a *AWS) Upload(local, remote string) error {
             if previousACL != acl {
                 logrus.Debugf("permissions for '%s' have changed from '%s' to '%s'", remote, previousACL, acl)
                 shouldCopy = true
             }
         }

         if !shouldCopy {
             logrus.Debugf("skipping '%s' because hashes and metadata match", local)
             return nil
         }

         logrus.Debugf("updating metadata for '%s' content-type: '%s', ACL: '%s'", local, contentType, acl)

         copyObject := &s3.CopyObjectInput{
-            Bucket:      aws.String(p.settings.Bucket),
+            Bucket:      aws.String(plugin.settings.Bucket),
             Key:         aws.String(remote),
-            CopySource:  aws.String(fmt.Sprintf("%s/%s", p.settings.Bucket, remote)),
+            CopySource:  aws.String(fmt.Sprintf("%s/%s", plugin.settings.Bucket, remote)),
             ACL:         aws.String(acl),
             ContentType: aws.String(contentType),
             Metadata:    metadata,
@@ -273,6 +313,7 @@ func (a *AWS) Upload(local, remote string) error {
         }

         _, err = a.client.CopyObject(copyObject)
         return err
     }
@@ -282,8 +323,9 @@ func (a *AWS) Upload(local, remote string) error {
     }

     logrus.Debugf("uploading '%s' with content-type '%s' and permissions '%s'", local, contentType, acl)
     putObject := &s3.PutObjectInput{
-        Bucket:      aws.String(p.settings.Bucket),
+        Bucket:      aws.String(plugin.settings.Bucket),
         Key:         aws.String(remote),
         Body:        file,
         ContentType: aws.String(contentType),
@@ -305,11 +347,13 @@ func (a *AWS) Upload(local, remote string) error {
     }

     _, err = a.client.PutObject(putObject)
     return err
 }

 func (a *AWS) Redirect(path, location string) error {
-    p := a.plugin
+    plugin := a.plugin
     logrus.Debugf("adding redirect from '%s' to '%s'", path, location)

     if a.plugin.settings.DryRun {
@@ -317,16 +361,18 @@ func (a *AWS) Redirect(path, location string) error {
     }

     _, err := a.client.PutObject(&s3.PutObjectInput{
-        Bucket:                  aws.String(p.settings.Bucket),
+        Bucket:                  aws.String(plugin.settings.Bucket),
         Key:                     aws.String(path),
         ACL:                     aws.String("public-read"),
         WebsiteRedirectLocation: aws.String(location),
     })
     return err
 }

 func (a *AWS) Delete(remote string) error {
-    p := a.plugin
+    plugin := a.plugin
     logrus.Debugf("removing remote file '%s'", remote)

     if a.plugin.settings.DryRun {
@@ -334,17 +380,20 @@ func (a *AWS) Delete(remote string) error {
     }

     _, err := a.client.DeleteObject(&s3.DeleteObjectInput{
-        Bucket: aws.String(p.settings.Bucket),
+        Bucket: aws.String(plugin.settings.Bucket),
         Key:    aws.String(remote),
     })
     return err
 }

 func (a *AWS) List(path string) ([]string, error) {
-    p := a.plugin
-    remote := make([]string, 1)
+    plugin := a.plugin
+
+    remote := make([]string, 0)
     resp, err := a.client.ListObjects(&s3.ListObjectsInput{
-        Bucket: aws.String(p.settings.Bucket),
+        Bucket: aws.String(plugin.settings.Bucket),
         Prefix: aws.String(path),
     })
     if err != nil {
@@ -357,7 +406,7 @@ func (a *AWS) List(path string) ([]string, error) {
     for *resp.IsTruncated {
         resp, err = a.client.ListObjects(&s3.ListObjectsInput{
-            Bucket: aws.String(p.settings.Bucket),
+            Bucket: aws.String(plugin.settings.Bucket),
             Prefix: aws.String(path),
             Marker: aws.String(remote[len(remote)-1]),
         })
@@ -376,7 +425,9 @@ func (a *AWS) List(path string) ([]string, error) {
 func (a *AWS) Invalidate(invalidatePath string) error {
     p := a.plugin
     logrus.Debugf("invalidating '%s'", invalidatePath)

     _, err := a.cfClient.CreateInvalidation(&cloudfront.CreateInvalidationInput{
         DistributionId: aws.String(p.settings.CloudFrontDistribution),
         InvalidationBatch: &cloudfront.InvalidationBatch{
@@ -389,5 +440,6 @@ func (a *AWS) Invalidate(invalidatePath string) error {
             },
         },
     })
+
     return err
 }
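
Note: the Upload change above swaps a bare err.(awserr.Error) assertion in the HeadObject error check for errors.As before inspecting the AWS error code. A minimal sketch of that inspection pattern with aws-sdk-go v1 is shown below; the error values and the handling of the "not found" code are illustrative assumptions, not the plugin's exact logic.

package main

import (
	"errors"
	"fmt"

	"github.com/aws/aws-sdk-go/aws/awserr"
)

// handleHeadError distinguishes "object is missing" from real failures.
// The code string depends on the endpoint; "NotFound" and "404" are the
// usual values reported for a missing key.
func handleHeadError(err error) error {
	if err == nil {
		return nil
	}

	var awsErr awserr.Error
	if errors.As(err, &awsErr) && (awsErr.Code() == "NotFound" || awsErr.Code() == "404") {
		fmt.Println("object does not exist yet, uploading")

		return nil
	}

	return err
}

func main() {
	missing := awserr.New("NotFound", "object not found", nil)
	fmt.Println(handleHeadError(missing))

	denied := awserr.New("AccessDenied", "access denied", nil)
	fmt.Println(handleHeadError(denied))
}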


@@ -44,14 +44,13 @@ type Result struct {
     err error
 }

-var MissingAwsValuesMessage = "Must set 'bucket'"

 // Validate handles the settings validation of the plugin.
 func (p *Plugin) Validate() error {
     wd, err := os.Getwd()
     if err != nil {
         return fmt.Errorf("error while retrieving working directory: %w", err)
     }

     p.settings.Source = filepath.Join(wd, p.settings.Source)
     p.settings.Target = strings.TrimPrefix(p.settings.Target, "/")
@@ -88,7 +87,7 @@ func (p *Plugin) createSyncJobs() error {
         return err
     }

-    local := make([]string, 1)
+    local := make([]string, 0)

     err = filepath.Walk(p.settings.Source, func(path string, info os.FileInfo, err error) error {
         if err != nil || info.IsDir() {
@@ -122,13 +121,16 @@ func (p *Plugin) createSyncJobs() error {
             action: "redirect",
         })
     }
+
     if p.settings.Delete {
-        for _, r := range remote {
+        for _, remote := range remote {
             found := false
-            rPath := strings.TrimPrefix(r, p.settings.Target+"/")
+            remotePath := strings.TrimPrefix(remote, p.settings.Target+"/")
+
             for _, l := range local {
-                if l == rPath {
+                if l == remotePath {
                     found = true
+
                     break
                 }
             }
@@ -136,7 +138,7 @@ func (p *Plugin) createSyncJobs() error {
             if !found {
                 p.settings.Jobs = append(p.settings.Jobs, Job{
                     local:  "",
-                    remote: r,
+                    remote: remote,
                     action: "delete",
                 })
             }
@@ -150,41 +152,46 @@ func (p *Plugin) runJobs() error {
     client := p.settings.Client
     jobChan := make(chan struct{}, p.settings.MaxConcurrency)
     results := make(chan *Result, len(p.settings.Jobs))

     var invalidateJob *Job

     logrus.Infof("Synchronizing with bucket '%s'", p.settings.Bucket)
-    for _, j := range p.settings.Jobs {
+
+    for _, job := range p.settings.Jobs {
         jobChan <- struct{}{}
-        go func(j Job) {
+
+        go func(job Job) {
             var err error
-            switch j.action {
+
+            switch job.action {
             case "upload":
-                err = client.Upload(j.local, j.remote)
+                err = client.Upload(job.local, job.remote)
             case "redirect":
-                err = client.Redirect(j.local, j.remote)
+                err = client.Redirect(job.local, job.remote)
             case "delete":
-                err = client.Delete(j.remote)
+                err = client.Delete(job.remote)
             case "invalidateCloudFront":
-                invalidateJob = &j
+                invalidateJob = &job
             default:
                 err = nil
             }
-            results <- &Result{j, err}
+            results <- &Result{job, err}
             <-jobChan
-        }(j)
+        }(job)
     }

     for range p.settings.Jobs {
         r := <-results
         if r.err != nil {
-            return fmt.Errorf("failed to %s %s to %s: %+v", r.j.action, r.j.local, r.j.remote, r.err)
+            return fmt.Errorf("failed to %s %s to %s: %w", r.j.action, r.j.local, r.j.remote, r.err)
         }
     }

     if invalidateJob != nil {
         err := client.Invalidate(invalidateJob.remote)
         if err != nil {
-            return fmt.Errorf("failed to %s %s to %s: %+v", invalidateJob.action, invalidateJob.local, invalidateJob.remote, err)
+            return fmt.Errorf("failed to %s %s to %s: %w", invalidateJob.action, invalidateJob.local, invalidateJob.remote, err)
         }
     }
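
Note: runJobs keeps its existing concurrency model, where a buffered channel works as a semaphore capping in-flight jobs at MaxConcurrency and a results channel collects one outcome per job; the rename to job and the explicit }(job) argument also avoid reusing the loop variable inside the goroutine. A stripped-down sketch of that pattern, with a hypothetical job type, is shown below.

package main

import "fmt"

type job struct{ id int }

func run(jobs []job, maxConcurrency int) {
	// The buffered channel acts as a semaphore: a send blocks once
	// maxConcurrency goroutines are already running.
	sem := make(chan struct{}, maxConcurrency)
	results := make(chan int, len(jobs))

	for _, j := range jobs {
		sem <- struct{}{}

		// Passing j as a parameter gives each goroutine its own copy,
		// mirroring the }(job) call in the diff above.
		go func(j job) {
			results <- j.id * j.id // do the work

			<-sem // release the slot
		}(j)
	}

	// Drain exactly one result per submitted job.
	for range jobs {
		fmt.Println(<-results)
	}
}

func main() {
	run([]job{{1}, {2}, {3}, {4}}, 2)
}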


@@ -12,7 +12,7 @@ type Plugin struct {
 }

 // New initializes a plugin from the given Settings, Pipeline, and Network.
-func New(settings Settings, pipeline drone.Pipeline, network drone.Network) drone.Plugin {
+func New(settings Settings, pipeline drone.Pipeline, network drone.Network) *Plugin {
     return &Plugin{
         settings: settings,
         pipeline: pipeline,
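
Note: changing New to return *Plugin instead of the drone.Plugin interface follows the "accept interfaces, return structs" guideline that the ireturn linter enforces; callers that need the interface still get it through implicit satisfaction. A tiny illustrative sketch with hypothetical types is shown below.

package main

import "fmt"

type Runner interface{ Run() error }

type Plugin struct{ name string }

func (p *Plugin) Run() error { return nil }

// New returns the concrete type; *Plugin still satisfies Runner wherever
// an interface value is expected.
func New(name string) *Plugin { return &Plugin{name: name} }

func main() {
	var r Runner = New("s3-sync")
	fmt.Println(r.Run())
}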