Unverified Commit 3752757c authored by Scott Miller's avatar Scott Miller Committed by GitHub
Browse files

Merge branch 'master' into base62-encode-decode

parents 076126e5 9b87d7f5
Showing with 2515 additions and 1378 deletions
+2515 -1378
This diff is collapsed.
This diff is collapsed.
{{- /*
Template inputs: the "package-list" datasource must provide .packages (one
entry per buildable package) and .layers (the ordered builder-image layers).
The rendered output of this template is a CircleCI configuration file.
*/ -}}
{{- $data := (datasource "package-list") -}}
{{- /*
BUILD_ID is set by the staging workflow to produce an identifiable build.
*/ -}}
{{- $buildID := (env.Getenv "BUILD_ID" "standalone") -}}
{{- /* Workflow name defaults to "build-standalone" for ad-hoc/local renders. */ -}}
{{- $workflowName := (env.Getenv "RELEASE_BUILD_WORKFLOW_NAME" "build-standalone") -}}
{{- $packages := $data.packages -}}
{{- $layers := $data.layers -}}
{{- /* No default: an empty PRODUCT_REVISION renders an empty env var below. */ -}}
{{- $revision := (env.Getenv "PRODUCT_REVISION") -}}
{{- /* cache-key prefixes every cache key with the global cache version. */ -}}
{{- define "cache-key"}}{{template "cache-version"}}-{{.}}{{end -}}
{{- define "cache-version"}}cache002{{end -}}
{{- /*
Any change to cache-version invalidates all build layer and package caches.
*/ -}}
# Current cache version: {{template "cache-version"}}
# Executors define the container environment shared by every job below.
executors:
  releaser:
    docker:
      - image: circleci/buildpack-deps
    # NOTE: these values are baked in at template-render time, not read by
    # CircleCI from the environment at run time.
    environment:
      PRODUCT_REVISION: "{{if $revision}}{{$revision}}{{end}}"
      AUTO_INSTALL_TOOLS: 'YES'
    # Fail fast in every run step: -e (errexit), -u (nounset), -o pipefail.
    shell: /usr/bin/env bash -euo pipefail -c
workflows:
  # The workflow name is parameterized so staging and standalone builds are
  # distinguishable in the CircleCI UI.
  {{$workflowName}}:
    jobs:
      # Warm the builder-image caches first; all package builds depend on it.
      - cache-builder-images:
          filters:
            branches:
              only:
                - /build-.*/
                - /ci.*/
      {{- range $packages}}
      - {{.meta.BUILD_JOB_NAME}}: { requires: [ cache-builder-images ] }
      {{- end }}
      # Bundle every built package into the release artifacts.
      - bundle-releases:
          requires:
            {{- range $packages}}
            - {{.meta.BUILD_JOB_NAME}}{{end}}
jobs:
  # cache-builder-images restores (or builds) the builder-image layers up to
  # the target layer type and saves them to the CircleCI cache so per-package
  # jobs can skip the expensive builder-image builds.
  cache-builder-images:
    executor: releaser
    steps:
      - setup_remote_docker
      - checkout
      - write-build-layer-cache-keys
      # Load best available cached image.
      {{- $targetLayerType := "build-static-assets" }}
      {{- /* Declare $targetLayer in this outer scope; it is reassigned inside
             the range below so later steps can reference the matched layer. */}}
      {{- $targetLayer := .}}
      {{- range $layers}}
      {{- if eq .type $targetLayerType }}
      {{- $targetLayer = .}}
      - restore_cache:
          keys:
            {{- range .meta.circleci.CACHE_KEY_PREFIX_LIST}}
            - {{template "cache-key" .}}
            {{- end}}
      - run:
          name: Finish early if loaded exact match from cache.
          command: |
            if [ -f {{.archivefile}} ]; then
              echo "Exact match found in cache, skipping build."
              circleci-agent step halt
            else
              echo "No exact match found, proceeding with build."
            fi
      - run: LAYER_SPEC_ID={{.name}} make -C packages*.lock load-builder-cache
      {{- end}}{{end}}
      # No exact match was found, so build each layer up to target type.
      {{- $finished := false }}
      {{- range $layers}}
      {{- if not $finished }}
      {{- /* Flip the flag on the target layer so layers after it are skipped. */}}
      {{- $finished = eq .type $targetLayerType}}
      - run: make -f packages*.lock/layer.mk {{.name}}-image
      {{- end}}
      {{- end}}
      # Save the target layer archive.
      - run: make -f packages*.lock/layer.mk {{$targetLayer.name}}-save
      # Store the target layer archive as all the relevant cache names.
      {{- /* LAYER_LIST and CACHE_KEY_PREFIX_LIST are paired by index ($i);
             the single archive is renamed to each expected archive name. */}}
      {{- $lastArchive := $targetLayer.archivefile}}
      {{- range $i, $l := $targetLayer.meta.builtin.LAYER_LIST}}
      {{- $currentArchive := $l.archive}}
      {{- if ne $currentArchive $lastArchive }}
      - run: mv {{$lastArchive}} {{$currentArchive}}
      {{- end}}
      - save_cache:
          key: {{template "cache-key" (index $targetLayer.meta.circleci.CACHE_KEY_PREFIX_LIST $i)}}
          paths:
            - {{$currentArchive}}
      {{- $lastArchive = $currentArchive }}
      {{- end}}
  {{- range $packages}}
  # One build job per package spec; the job name comes from package metadata.
  {{.meta.BUILD_JOB_NAME}}:
    executor: releaser
    environment:
      - PACKAGE_SPEC_ID: {{.packagespecid}}
    steps:
      - setup_remote_docker
      - checkout
      # Restore the package cache first, we might not need to rebuild.
      - write-package-cache-key
      - restore_cache:
          key: '{{template "cache-key" .meta.circleci.PACKAGE_CACHE_KEY}}'
      - run:
          name: Check the cache status.
          command: |
            if ! { PKG=$(find .buildcache/packages/store -maxdepth 1 -mindepth 1 -name '*.zip' 2> /dev/null) && [ -n "$PKG" ]; }; then
              echo "No package found, continuing with build."
              exit 0
            fi
            echo "Package already cached, skipping build."
            circleci-agent step halt
      # We need to rebuild, so load the builder cache.
      - write-build-layer-cache-keys
      - restore_cache:
          keys:
            {{- range .meta.circleci.BUILDER_CACHE_KEY_PREFIX_LIST}}
            - {{template "cache-key" .}}
            {{- end}}
      - run: make -C packages*.lock load-builder-cache
      - run: make -C packages*.lock package
      - run: ls -lahR .buildcache/packages
      # Save package cache.
      - save_cache:
          key: '{{template "cache-key" .meta.circleci.PACKAGE_CACHE_KEY}}'
          paths:
            - .buildcache/packages/store
      # Save builder image cache if necessary.
      # The range should only iterate over a single layer.
      {{- /* Capture the package in $pkg: inside range/with, "." changes. */}}
      {{- $pkg := . -}}
      {{- range $idx, $layerInfo := .meta.builtin.BUILD_LAYERS }}
      {{- if eq $layerInfo.type "warm-go-build-vendor-cache" }}
      {{- with $layerInfo }}
      {{- /* BUILD_LAYERS and BUILDER_CACHE_KEY_PREFIX_LIST pair by index. */}}
      {{- $circleCICacheKey := (index $pkg.meta.circleci.BUILDER_CACHE_KEY_PREFIX_LIST $idx) }}
      - run:
          name: Check builder cache status
          command: |
            if [ -f {{.archive}} ]; then
              echo "Builder image already cached, skipping cache step."
              circleci-agent step halt
            fi
      - run: make -f packages*.lock/layer.mk {{.name}}-save
      - save_cache:
          key: '{{template "cache-key" $circleCICacheKey}}'
          paths:
            - {{.archive}}
      {{- end}}
      {{- end}}
      {{- end}}
  {{end}}
  # bundle-releases restores every cached package, writes per-package
  # metadata and aliases, then publishes everything as CircleCI artifacts.
  bundle-releases:
    executor: releaser
    steps:
      - checkout
      - write-all-package-cache-keys
      {{- range $packages}}
      - load-{{.meta.BUILD_JOB_NAME}}
      - run:
          environment:
            PACKAGE_SPEC_ID: {{.packagespecid}}
          name: Write package metadata for {{.meta.BUILD_JOB_NAME}}
          command: |
            make package-meta
      {{- end}}
      - run:
          name: Write package aliases
          command:
            make aliases
      - run:
          name: List Build Cache
          command: ls -lahR .buildcache
      # Surface the package store directory as an artifact.
      # This makes each zipped package separately downloadable.
      - store_artifacts:
          path: .buildcache/packages
          destination: packages-{{$buildID}}
      # Surface a tarball of the whole package store as an artifact.
      - run: tar -czf packages-{{$buildID}}.tar.gz .buildcache/packages
      - store_artifacts:
          path: packages-{{$buildID}}.tar.gz
          destination: packages-{{$buildID}}.tar.gz
      # Surface a tarball of just the metadata files.
      - run: tar -czf meta-{{$buildID}}.tar.gz .buildcache/packages/store/*.json
      - store_artifacts:
          path: meta-{{$buildID}}.tar.gz
          destination: meta-{{$buildID}}.tar.gz
# Reusable commands referenced by the job steps above.
commands:
  {{- range $packages }}
  # load-<job> restores the finished package for one package spec from cache.
  load-{{.meta.BUILD_JOB_NAME}}:
    steps:
      - restore_cache:
          key: '{{template "cache-key" .meta.circleci.PACKAGE_CACHE_KEY}}'
  {{end}}
  # Cache keys are computed by make targets in the lockfile directory, so the
  # same key logic is shared between local builds and CI.
  write-build-layer-cache-keys:
    steps:
      - run:
          name: Write builder layer cache keys
          command: make -C packages*.lock write-builder-cache-keys
  write-package-cache-key:
    steps:
      - run:
          name: Write package cache key
          command: make -C packages*.lock write-package-cache-key
  write-all-package-cache-keys:
    steps:
      - run:
          # NOTE(review): step name duplicates write-package-cache-key's
          # display name; consider "Write all package cache keys".
          name: Write package cache key
          command: make -C packages*.lock write-all-package-cache-keys
vendor/* linguist-vendored
website/* linguist-documentation
/packagespec.mk linguist-generated
\ No newline at end of file
## Next
CHANGES:
* agent: Agent now properly returns a non-zero exit code on error, such as one due to template rendering failure. Using `error_on_missing_key` in the template config will cause agent to immediately exit on failure. In order to make agent properly exit due to continuous failure from template rendering errors, the old behavior of indefinitely restarting the template server is now changed to exit once the default retry attempt of 12 times (with exponential backoff) gets exhausted. [[GH-9670](https://github.com/hashicorp/vault/pull/9670)]
FEATURES:
* **Couchbase Secrets**: Vault can now manage static and dynamic credentials for Couchbase. [[GH-9664](https://github.com/hashicorp/vault/pull/9664)]
......@@ -12,6 +16,7 @@ IMPROVEMENTS:
* command/server: Delay informational messages in -dev mode until logs have settled. [[GH-9702](https://github.com/hashicorp/vault/pull/9702)]
* command/server: Add environment variable support for `disable_mlock`. [[GH-9931](https://github.com/hashicorp/vault/pull/9931)]
* sdk/framework: Add a time type for API fields. [[GH-9911](https://github.com/hashicorp/vault/pull/9911)]
* seal/awskms: Add logging during awskms auto-unseal [[GH-9794](https://github.com/hashicorp/vault/pull/9794)]
BUG FIXES:
......@@ -23,6 +28,16 @@ BUG FIXES:
* secrets/database: Fix handling of TLS options in mongodb connection strings [[GH-9519](https://github.com/hashicorp/vault/pull/9519)]
* secrets/gcp: Ensure that the IAM policy version is appropriately set after a roleset's bindings have changed. [[GH-93](https://github.com/hashicorp/vault-plugin-secrets-gcp/pull/93)]
## 1.5.5
### TBD
BUG FIXES:
* auth/aws: Restrict region selection when in the aws-us-gov partition to avoid IAM errors [[GH-9947](https://github.com/hashicorp/vault/pull/9947)]
* core: Fix deadlock in handling EGP policies
* core (enterprise): Fix extraneous error messages in DR Cluster
* secrets/mysql: Conditionally overwrite TLS parameters for MySQL secrets engine [[GH-9729](https://github.com/hashicorp/vault/pull/9729)]
## 1.5.4
### September 24th, 2020
......
This diff is collapsed.
......@@ -320,12 +320,21 @@ func generatePartitionToRegionMap() map[string]*endpoints.Region {
for _, p := range partitions {
// For most partitions, it's fine to choose a single region randomly.
// However, for the "aws" partition, it's best to choose "us-east-1"
// because it is always enabled (and enabled for STS) by default.
// However, there are a few exceptions:
//
// For "aws", choose "us-east-1" because it is always enabled (and
// enabled for STS) by default.
//
// For "aws-us-gov", choose "us-gov-west-1" because it is the only
// valid region for IAM operations.
// ref: https://github.com/aws/aws-sdk-go/blob/v1.34.25/aws/endpoints/defaults.go#L8176-L8194
for _, r := range p.Regions() {
if p.ID() == "aws" && r.ID() != "us-east-1" {
continue
}
if p.ID() == "aws-us-gov" && r.ID() != "us-gov-west-1" {
continue
}
partitionToRegion[p.ID()] = &r
break
}
......
......@@ -1819,4 +1819,7 @@ func TestGeneratePartitionToRegionMap(t *testing.T) {
if m["aws"].ID() != "us-east-1" {
t.Fatal("expected us-east-1 but received " + m["aws"].ID())
}
if m["aws-us-gov"].ID() != "us-gov-west-1" {
t.Fatal("expected us-gov-west-1 but received " + m["aws-us-gov"].ID())
}
}
......@@ -98,7 +98,18 @@ func (h *CLIHandler) Auth(c *api.Client, m map[string]string) (*api.Secret, erro
headerValue = ""
}
creds, err := RetrieveCreds(m["aws_access_key_id"], m["aws_secret_access_key"], m["aws_security_token"])
logVal, ok := m["log_level"]
if !ok {
logVal = "info"
}
level := hclog.LevelFromString(logVal)
if level == hclog.NoLevel {
return nil, fmt.Errorf("failed to parse 'log_level' value: %q", logVal)
}
hlogger := hclog.Default()
hlogger.SetLevel(level)
creds, err := RetrieveCreds(m["aws_access_key_id"], m["aws_secret_access_key"], m["aws_security_token"], hlogger)
if err != nil {
return nil, err
}
......@@ -128,11 +139,12 @@ func (h *CLIHandler) Auth(c *api.Client, m map[string]string) (*api.Secret, erro
return secret, nil
}
func RetrieveCreds(accessKey, secretKey, sessionToken string) (*credentials.Credentials, error) {
func RetrieveCreds(accessKey, secretKey, sessionToken string, logger hclog.Logger) (*credentials.Credentials, error) {
credConfig := &awsutil.CredentialsConfig{
AccessKey: accessKey,
SecretKey: secretKey,
SessionToken: sessionToken,
Logger: logger,
}
creds, err := credConfig.GenerateCredentialChain()
if err != nil {
......@@ -195,6 +207,10 @@ Configuration:
role=<string>
Name of the role to request a token against
log_level=<string>
Set logging level during AWS credential acquisition. Valid levels are
trace, debug, info, warn, error. Defaults to info.
`
return strings.TrimSpace(help)
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment