diff --git a/.ci/README.md b/.ci/README.md index 8471562475f9..cd78da3f4d5f 100644 --- a/.ci/README.md +++ b/.ci/README.md @@ -6,8 +6,6 @@ These tools manage the downstream repositories of [magic-modules](https://github Currently, they manage: * terraform -* puppet -* chef * ansible # CI For Downstream Developers diff --git a/.ci/ci.yml.tmpl b/.ci/ci.yml.tmpl index d87f661869f9..f2d77b54f05f 100644 --- a/.ci/ci.yml.tmpl +++ b/.ci/ci.yml.tmpl @@ -44,11 +44,13 @@ resources: authorship_restriction: true no_label: automerged - - name: terraform-intermediate +{% for v in vars.terraform_v.itervalues() %} + - name: {{ v.short_name }}-intermediate type: git-branch source: - uri: git@github.com:((github-account.username))/terraform-provider-google.git + uri: git@github.com:((github-account.username))/{{ v.provider_name }}.git private_key: ((repo-key.private_key)) +{% endfor %} - name: ansible-intermediate type: git-branch @@ -62,30 +64,6 @@ resources: uri: git@github.com:((github-account.username))/inspec-gcp.git private_key: ((repo-key.private_key)) -{% for module in vars.puppet_modules %} - - name: puppet-{{module}}-intermediate - type: git-branch - source: -{% if module == '_bundle' %} - uri: git@github.com:((github-account.username))/puppet-google.git -{% else %} - uri: git@github.com:((github-account.username))/puppet-google-{{module}}.git -{% endif %} - private_key: ((repo-key.private_key)) -{% endfor %} - -{% for module in vars.chef_modules %} - - name: chef-{{module}}-intermediate - type: git-branch - source: -{% if module == '_bundle' %} - uri: git@github.com:((github-account.username))/chef-google.git -{% else %} - uri: git@github.com:((github-account.username))/chef-google-{{module}}.git -{% endif %} - private_key: ((repo-key.private_key)) -{% endfor %} - - name: mm-approved-prs type: github-pull-request source: @@ -136,18 +114,25 @@ jobs: status: pending path: magic-modules - aggregate: + {% for k, v in vars.terraform_v.iteritems() %} - do: # consumes: 
magic-modules-branched # produces: terraform-generated - - task: generate-terraform + - task: generate-{{v.short_name}} file: magic-modules-branched/.ci/magic-modules/generate-terraform.yml - # Puts 'terraform-generated' into the robot's fork. - - put: terraform-intermediate params: - repository: terraform-generated - branch_file: magic-modules-branched/branchname - only_if_diff: true - force: true + VERSION: {{k}} + PROVIDER_NAME: {{v.provider_name}} + SHORT_NAME: {{v.short_name}} + # Puts 'terraform-generated' into the robot's fork. + - aggregate: + - put: {{v.short_name}}-intermediate + params: + repository: terraform-generated/{{k}} + branch_file: magic-modules-branched/branchname + only_if_diff: true + force: true + {% endfor %} - do: # consumes: magic-modules-branched # produces: ansible-generated @@ -172,43 +157,6 @@ jobs: branch_file: magic-modules-branched/branchname only_if_diff: true force: true - {% if vars.puppet_modules %} - - do: - # consumes: magic-modules-branched - # produces: puppet-generated - - task: generate-puppet - file: magic-modules-branched/.ci/magic-modules/generate-puppet.yml - params: - PRODUCTS: {{','.join(vars.puppet_modules)}} - - aggregate: - {% for module in vars.puppet_modules %} - - put: puppet-{{module}}-intermediate - params: - repository: puppet-generated/{{module}} - branch_file: magic-modules-branched/branchname - only_if_diff: true - force: true - {% endfor %} - {% endif %} - - {% if vars.chef_modules %} - - do: - # consumes: magic-modules-branched - # produces: chef-generated - - task: generate-chef - file: magic-modules-branched/.ci/magic-modules/generate-chef.yml - params: - PRODUCTS: {{','.join(vars.chef_modules)}} - - aggregate: - {% for module in vars.chef_modules %} - - put: chef-{{module}}-intermediate - params: - repository: chef-generated/{{module}} - branch_file: magic-modules-branched/branchname - only_if_diff: true - force: true - {% endfor %} - {% endif %} on_failure: put: magic-modules-new-prs params: @@ 
-224,8 +172,7 @@ jobs: # This needs to match the username for the 'intermediate' resources. GH_USERNAME: ((github-account.username)) CREDS: ((repo-key.private_key)) - PUPPET_MODULES: {{','.join(vars.puppet_modules)}} - CHEF_MODULES: {{','.join(vars.chef_modules)}} + TERRAFORM_VERSIONS: "{{','.join(vars.terraform_properties_serialized)}}" TERRAFORM_ENABLED: true ANSIBLE_ENABLED: true INSPEC_ENABLED: true @@ -243,11 +190,18 @@ jobs: version: every trigger: true params: - submodules: [build/terraform] + submodules: [{{','.join(vars.terraform_submodules)}}] passed: [mm-generate] - - task: test - file: magic-modules/.ci/unit-tests/task.yml - timeout: 30m + - aggregate: + {% for v in vars.terraform_v.itervalues() %} + - task: test-{{v.short_name}} + file: magic-modules/.ci/unit-tests/task.yml + timeout: 30m + params: + PROVIDER_NAME: {{v.provider_name}} + SHORT_NAME: {{v.short_name}} + TEST_DIR: {{v.test_dir}} + {% endfor %} on_failure: do: - get: magic-modules-new-prs @@ -300,70 +254,6 @@ jobs: context: inspec-tests path: magic-modules-new-prs - - name: puppet-test - plan: - - get: magic-modules - version: every - trigger: true - params: - submodules: - {% for module in vars.puppet_submodules %} - - {{module}} - {% endfor %} - passed: [mm-generate] - - aggregate: - {% for module in vars.puppet_modules %} - - task: test-{{module}} - file: magic-modules/.ci/unit-tests/puppet.yml - timeout: 30m - params: - PRODUCT: {{module}} - {%- if vars.puppet_test_excludes.get(module) %} - EXCLUDE_PATTERN: {{','.join(vars.puppet_test_excludes[module])}} - {%- endif %} - {% endfor %} - on_failure: - do: - - get: magic-modules-new-prs - passed: [mm-generate] - - put: magic-modules-new-prs - params: - status: failure - context: puppet-tests - path: magic-modules-new-prs - - - name: chef-test - plan: - - get: magic-modules - version: every - trigger: true - params: - submodules: - {% for module in vars.chef_submodules %} - - {{module}} - {% endfor %} - passed: [mm-generate] - - aggregate: 
- {% for module in vars.chef_modules %} - - task: test-{{module}} - file: magic-modules/.ci/unit-tests/chef.yml - timeout: 30m - params: - PRODUCT: {{module}} - {%- if vars.chef_test_excludes.get(module) %} - EXCLUDE_PATTERN: {{','.join(vars.chef_test_excludes[module])}} - {%- endif %} - {% endfor %} - on_failure: - do: - - get: magic-modules-new-prs - passed: [mm-generate] - - put: magic-modules-new-prs - params: - status: failure - context: chef-tests - path: magic-modules-new-prs - - name: create-prs plan: - get: magic-modules @@ -373,12 +263,6 @@ jobs: submodules: {{vars.all_submodules_yaml_format}} passed: - mm-generate - {%- if vars.chef_modules %} - - chef-test - {%- endif -%} - {%- if vars.puppet_modules %} - - puppet-test - {%- endif %} - terraform-test - ansible-test - inspec-test @@ -399,14 +283,7 @@ jobs: TERRAFORM_REPO_USER: terraform-providers ANSIBLE_REPO_USER: modular-magician INSPEC_REPO_USER: modular-magician - {%- if vars.puppet_modules %} - PUPPET_REPO_USER: GoogleCloudPlatform - PUPPET_MODULES: {{','.join(vars.puppet_modules)}} - {%- endif %} - {%- if vars.chef_modules %} - CHEF_REPO_USER: GoogleCloudPlatform - CHEF_MODULES: {{','.join(vars.chef_modules)}} - {%- endif %} + TERRAFORM_VERSIONS: "{{','.join(vars.terraform_properties_serialized)}}" on_failure: put: magic-modules-new-prs params: @@ -420,9 +297,10 @@ jobs: # the pipeline (when a PR needs to be updated), this does that updating by pushing # the new code to the repository/branch from which a pull request is already open. - aggregate: - - put: terraform-intermediate + {% for v in vars.terraform_v.itervalues() %} + - put: {{v.short_name}}-intermediate params: - repository: magic-modules-with-comment/build/terraform + repository: magic-modules-with-comment/build/{{ v.short_name }} branch_file: magic-modules-with-comment/original_pr_branch_name # Every time a change runs through this pipeline, it will generate a commit with # a different hash - the hash includes timestamps. 
Therefore, even if there's no @@ -432,6 +310,7 @@ jobs: # not push the update even though the commit hashes are different. only_if_diff: true force: true + {% endfor %} - put: ansible-intermediate params: repository: magic-modules-with-comment/build/ansible @@ -446,23 +325,6 @@ jobs: # See comment on terraform-intermediate only_if_diff: true force: true - {% for module in vars.puppet_modules %} - - put: puppet-{{module}}-intermediate - params: - repository: magic-modules-with-comment/build/puppet/{{module}} - branch_file: magic-modules-with-comment/original_pr_branch_name - only_if_diff: true - force: true - {% endfor %} - - {% for module in vars.chef_modules %} - - put: chef-{{module}}-intermediate - params: - repository: magic-modules-with-comment/build/chef/{{module}} - branch_file: magic-modules-with-comment/original_pr_branch_name - only_if_diff: true - force: true - {% endfor %} # This isn't load-bearing - it's just aesthetic. It will also be a no-op the first # time through, it works the same way as the preceding push. diff --git a/.ci/magic-modules/create-pr.sh b/.ci/magic-modules/create-pr.sh index 885492e1d0c9..852fdb6c8fed 100755 --- a/.ci/magic-modules/create-pr.sh +++ b/.ci/magic-modules/create-pr.sh @@ -24,8 +24,8 @@ echo "$ORIGINAL_PR_BRANCH" > ./original_pr_branch_name # branch of the terraform submodule. All the submodules will be on the the same # branch name - we pick terraform because it's the first one the magician supported. BRANCH_NAME="$(git config -f .gitmodules --get submodule.build/terraform.branch)" -IFS="," read -ra PUPPET_PRODUCTS <<< "$PUPPET_MODULES" -IFS="," read -ra CHEF_PRODUCTS <<< "$CHEF_MODULES" +IFS="," read -ra TERRAFORM_VERSIONS <<< "$TERRAFORM_VERSIONS" + git checkout -b "$BRANCH_NAME" if [ "$BRANCH_NAME" = "$ORIGINAL_PR_BRANCH" ]; then @@ -34,22 +34,28 @@ if [ "$BRANCH_NAME" = "$ORIGINAL_PR_BRANCH" ]; then # There is no existing PR - this is the first pass through the pipeline and # we will need to create a PR using 'hub'. 
if [ -n "$TERRAFORM_REPO_USER" ]; then - pushd build/terraform - - git log -1 --pretty=%B > ./downstream_body - echo "" >> ./downstream_body - echo "" >> ./downstream_body - if [ -n "$ORIGINAL_PR_USER" ]; then - echo "/cc @$ORIGINAL_PR_USER" >> ./downstream_body - fi - - git checkout -b "$BRANCH_NAME" - if TF_PR=$(hub pull-request -b "$TERRAFORM_REPO_USER/terraform-provider-google:2.0.0" -F ./downstream_body); then - DEPENDENCIES="${DEPENDENCIES}depends: $TF_PR ${NEWLINE}" - else - echo "Terraform - did not generate a PR." - fi - popd + for VERSION in "${TERRAFORM_VERSIONS[@]}"; do + IFS=":" read -ra TERRAFORM_DATA <<< "$VERSION" + PROVIDER_NAME="${TERRAFORM_DATA[0]}" + SUBMODULE_DIR="${TERRAFORM_DATA[1]}" + + pushd build/$SUBMODULE_DIR + + git log -1 --pretty=%B > ./downstream_body + echo "" >> ./downstream_body + echo "" >> ./downstream_body + if [ -n "$ORIGINAL_PR_USER" ]; then + echo "/cc @$ORIGINAL_PR_USER" >> ./downstream_body + fi + + git checkout -b "$BRANCH_NAME" + if TF_PR=$(hub pull-request -b "$TERRAFORM_REPO_USER/$PROVIDER_NAME:2.0.0" -F ./downstream_body); then + DEPENDENCIES="${DEPENDENCIES}depends: $TF_PR ${NEWLINE}" + else + echo "$SUBMODULE_DIR - did not generate a PR." + fi + popd + done fi if [ -n "$ANSIBLE_REPO_USER" ]; then @@ -90,65 +96,15 @@ if [ "$BRANCH_NAME" = "$ORIGINAL_PR_BRANCH" ]; then popd fi - for PRD in "${PUPPET_PRODUCTS[@]}"; do - - pushd "build/puppet/$PRD" - - git log -1 --pretty=%B > ./downstream_body - echo "" >> ./downstream_body - echo "" >> ./downstream_body - if [ -n "$ORIGINAL_PR_USER" ]; then - echo "/cc @$ORIGINAL_PR_USER" >> ./downstream_body - fi - - git checkout -b "$BRANCH_NAME" - if [[ $PRD == *"_bundle"* ]]; then - repo="puppet-google" - else - repo="puppet-google-$PRD" - fi - if PUP_PR=$(hub pull-request -b "$PUPPET_REPO_USER/$repo:master" -F ./downstream_body); then - DEPENDENCIES="${DEPENDENCIES}depends: $PUP_PR ${NEWLINE}" - else - echo "Puppet $PRD - did not generate a PR." 
- fi - popd - done - - for PRD in "${CHEF_PRODUCTS[@]}"; do - - pushd "build/chef/$PRD" - - git log -1 --pretty=%B > ./downstream_body - echo "" >> ./downstream_body - echo "" >> ./downstream_body - if [ -n "$ORIGINAL_PR_USER" ]; then - echo "/cc @$ORIGINAL_PR_USER" >> ./downstream_body - fi - - git checkout -b "$BRANCH_NAME" - if [[ $PRD == *"_bundle"* ]]; then - repo="chef-google" - else - repo="chef-google-$PRD" - fi - if CHEF_PR=$(hub pull-request -b "$CHEF_REPO_USER/$repo:master" -F ./downstream_body); then - DEPENDENCIES="${DEPENDENCIES}depends: $CHEF_PR ${NEWLINE}" - else - echo "Chef $PRD - did not generate a PR." - fi - popd - done - if [ -z "$DEPENDENCIES" ]; then - cat << EOF > ./pr_comment + cat << EOF > ./pr_comment I am a robot that works on MagicModules PRs! I checked the downstream repositories (see README.md for which ones I can write to), and none of them seem to have any changes. Once this PR is approved, you can feel free to merge it without taking any further steps. EOF else - cat << EOF > ./pr_comment + cat << EOF > ./pr_comment I am a robot that works on MagicModules PRs! I built this PR into one or more PRs on other repositories, and when those are closed, this PR will also be merged and closed. 
@@ -163,21 +119,16 @@ else git branch -f "$ORIGINAL_PR_BRANCH" if [ -n "$TERRAFORM_REPO_USER" ]; then - pushd build/terraform - git branch -f "$ORIGINAL_PR_BRANCH" - popd + for VERSION in "${TERRAFORM_VERSIONS[@]}"; do + IFS=":" read -ra TERRAFORM_DATA <<< "$VERSION" + PROVIDER_NAME="${TERRAFORM_DATA[0]}" + SUBMODULE_DIR="${TERRAFORM_DATA[1]}" + pushd "build/$SUBMODULE_DIR" + git branch -f "$ORIGINAL_PR_BRANCH" + popd + done fi - for PRD in "${PUPPET_PRODUCTS[@]}"; do - pushd "build/puppet/$PRD" - git branch -f "$ORIGINAL_PR_BRANCH" - popd - done - for PRD in "${CHEF_PRODUCTS[@]}"; do - pushd "build/chef/$PRD" - git branch -f "$ORIGINAL_PR_BRANCH" - popd - done # Note - we're interested in HEAD~1 here, not HEAD, because HEAD is the # generated code commit. :) cat << EOF > ./pr_comment diff --git a/.ci/magic-modules/create-pr.yml b/.ci/magic-modules/create-pr.yml index 735318704f06..187c8c6aa266 100644 --- a/.ci/magic-modules/create-pr.yml +++ b/.ci/magic-modules/create-pr.yml @@ -25,7 +25,4 @@ params: TERRAFORM_REPO_USER: "" ANSIBLE_REPO_USER: "" INSPEC_REPO_USER: "" - PUPPET_REPO_USER: "" - PUPPET_MODULES: "" - CHEF_REPO_USER: "" - CHEF_MODULES: "" + TERRAFORM_VERSIONS: "" diff --git a/.ci/magic-modules/generate-chef.yml b/.ci/magic-modules/generate-chef.yml deleted file mode 100644 index 1dd5382fa994..000000000000 --- a/.ci/magic-modules/generate-chef.yml +++ /dev/null @@ -1,24 +0,0 @@ ---- -# This file takes two inputs: magic-modules-branched in detached-HEAD state, and the CI repo. -# It spits out "chef-generated", a directory containing chef repos on a new branch -# (named after the HEAD commit on the PR), with the new generated code in it. 
-platform: linux - -image_resource: - type: docker-image - source: - repository: nmckinley/go-ruby-python - tag: '1.11-2.5-2.7' - -inputs: - - name: magic-modules-branched - -outputs: - - name: chef-generated - -run: - path: magic-modules-branched/.ci/magic-modules/generate-puppet-chef.sh - -params: - PRODUCTS: "" - PROVIDER: chef diff --git a/.ci/magic-modules/generate-puppet-chef.sh b/.ci/magic-modules/generate-puppet-chef.sh deleted file mode 100755 index 7bd2433b3264..000000000000 --- a/.ci/magic-modules/generate-puppet-chef.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash - -# This script takes in 'magic-modules-branched', a git repo tracking the head of a PR against magic-modules. -# It outputs "$PROVIDER-generated", a folder containing at least one non-submodule git repo containing the -# generated puppet/chef code. - -set -x -set -e -source "$(dirname "$0")/helpers.sh" -PATCH_DIR="$(pwd)/patches" - -IFS="," read -ra PRODUCT_ARRAY <<< "$PRODUCTS" -for PRD in "${PRODUCT_ARRAY[@]}"; do - pushd magic-modules-branched - LAST_COMMIT_AUTHOR="$(git log --pretty="%an <%ae>" -n1 HEAD)" - find build/"${PROVIDER}/${PRD}"/ -type f -not -name '.git*' -not -name '.last_run.json' -print0 | xargs -0 rm -rf -- - bundle install - # Running with the --debug flag will cause Concourse to crash - bundle exec compiler -p "products/$PRD" -e "$PROVIDER" -o "build/$PROVIDER/$PRD" - - # This command can crash - if that happens, the script should not fail. - set +e - COMMIT_MSG="$(python .ci/magic-modules/extract_from_pr_description.py --tag "$PROVIDER-$PRD" < .git/body)" - set -e - if [ -z "$COMMIT_MSG" ]; then - COMMIT_MSG="Magic Modules changes." - fi - - pushd "build/$PROVIDER/$PRD" - # These config entries will set the "committer". - git config --global user.email "magic-modules@google.com" - git config --global user.name "Modular Magician" - - git add -A - # Set the "author" to the commit's real author. 
- git commit -m "$COMMIT_MSG" --author="$LAST_COMMIT_AUTHOR" || true # don't crash if no changes - git checkout -B "$(cat ../../../branchname)" - apply_patches "$PATCH_DIR/GoogleCloudPlatform/$PROVIDER-google-$PRD" "$COMMIT_MSG" "$LAST_COMMIT_AUTHOR" "master" - popd - popd - git clone "magic-modules-branched/build/$PROVIDER/$PRD" "$PROVIDER-generated/$PRD" - -done diff --git a/.ci/magic-modules/generate-puppet.yml b/.ci/magic-modules/generate-puppet.yml deleted file mode 100644 index 768a96f2eaad..000000000000 --- a/.ci/magic-modules/generate-puppet.yml +++ /dev/null @@ -1,25 +0,0 @@ ---- -# This file takes two inputs: magic-modules-branched in detached-HEAD state, and the patches. -# It spits out "puppet-generated", a directory containing puppet repos on a new branch -# (named after the HEAD commit on the PR), with the new generated code in it. -platform: linux - -image_resource: - type: docker-image - source: - repository: nmckinley/go-ruby-python - tag: '1.11-2.5-2.7' - -inputs: - - name: magic-modules-branched - - name: patches - -outputs: - - name: puppet-generated - -run: - path: magic-modules-branched/.ci/magic-modules/generate-puppet-chef.sh - -params: - PRODUCTS: "" - PROVIDER: puppet diff --git a/.ci/magic-modules/generate-terraform.sh b/.ci/magic-modules/generate-terraform.sh index 609b623ea994..98e37a7fe695 100755 --- a/.ci/magic-modules/generate-terraform.sh +++ b/.ci/magic-modules/generate-terraform.sh @@ -15,10 +15,10 @@ export GOPATH="${PWD}/go" mkdir -p "${GOPATH}/src/github.com/terraform-providers" pushd magic-modules-branched -ln -s "${PWD}/build/terraform/" "${GOPATH}/src/github.com/terraform-providers/terraform-provider-google" +ln -s "${PWD}/build/$SHORT_NAME/" "${GOPATH}/src/github.com/terraform-providers/$PROVIDER_NAME" popd -pushd "${GOPATH}/src/github.com/terraform-providers/terraform-provider-google" +pushd "${GOPATH}/src/github.com/terraform-providers/$PROVIDER_NAME" go get popd @@ -27,17 +27,17 @@ LAST_COMMIT_AUTHOR="$(git log 
--pretty="%an <%ae>" -n1 HEAD)" bundle install # Build all terraform products -bundle exec compiler -a -e terraform -o "${GOPATH}/src/github.com/terraform-providers/terraform-provider-google/" +bundle exec compiler -a -e terraform -o "${GOPATH}/src/github.com/terraform-providers/$PROVIDER_NAME/" -v $VERSION # This command can crash - if that happens, the script should not fail. set +e -TERRAFORM_COMMIT_MSG="$(python .ci/magic-modules/extract_from_pr_description.py --tag terraform < .git/body)" +TERRAFORM_COMMIT_MSG="$(python .ci/magic-modules/extract_from_pr_description.py --tag $SHORT_NAME < .git/body)" set -e if [ -z "$TERRAFORM_COMMIT_MSG" ]; then TERRAFORM_COMMIT_MSG="Magic Modules changes." fi -pushd "build/terraform" +pushd "build/$SHORT_NAME" # These config entries will set the "committer". git config --global user.email "magic-modules@google.com" git config --global user.name "Modular Magician" @@ -47,9 +47,9 @@ git add -A git commit -m "$TERRAFORM_COMMIT_MSG" --author="$LAST_COMMIT_AUTHOR" || true # don't crash if no changes git checkout -B "$(cat ../../branchname)" -apply_patches "$PATCH_DIR/terraform-providers/terraform-provider-google" "$TERRAFORM_COMMIT_MSG" "$LAST_COMMIT_AUTHOR" "2.0.0" +apply_patches "$PATCH_DIR/terraform-providers/$PROVIDER_NAME" "$TERRAFORM_COMMIT_MSG" "$LAST_COMMIT_AUTHOR" "2.0.0" popd popd -git clone magic-modules-branched/build/terraform ./terraform-generated +git clone magic-modules-branched/build/$SHORT_NAME ./terraform-generated/$VERSION diff --git a/.ci/magic-modules/generate-terraform.yml b/.ci/magic-modules/generate-terraform.yml index 24b23d20bf21..09c246817517 100644 --- a/.ci/magic-modules/generate-terraform.yml +++ b/.ci/magic-modules/generate-terraform.yml @@ -19,3 +19,8 @@ outputs: run: path: magic-modules-branched/.ci/magic-modules/generate-terraform.sh + +params: + VERSION: "" + PROVIDER_NAME: "" + SHORT_NAME: "" diff --git a/.ci/magic-modules/point-to-submodules.sh b/.ci/magic-modules/point-to-submodules.sh index 
39ba92110781..0aa170095f8f 100755 --- a/.ci/magic-modules/point-to-submodules.sh +++ b/.ci/magic-modules/point-to-submodules.sh @@ -13,42 +13,20 @@ chmod 400 ~/github_private_key pushd magic-modules-branched BRANCH="$(cat ./branchname)" # Update this repo to track the submodules we just pushed: -IFS="," read -ra PRODUCT_ARRAY <<< "$PUPPET_MODULES" -for PRD in "${PRODUCT_ARRAY[@]}"; do - git config -f .gitmodules "submodule.build/puppet/$PRD.branch" "$BRANCH" - # Bundle repo does not use the same naming scheme as the others - if [[ $PRD == *"_bundle"* ]]; then - repo="puppet-google" - else - repo="puppet-google-$PRD" - fi - git config -f .gitmodules "submodule.build/puppet/$PRD.url" "git@github.com:$GH_USERNAME/$repo.git" - git submodule sync "build/puppet/$PRD" - ssh-agent bash -c "ssh-add ~/github_private_key; git submodule update --remote --init build/puppet/$PRD" - git add "build/puppet/$PRD" -done - -IFS="," read -ra PRODUCT_ARRAY <<< "$CHEF_MODULES" -for PRD in "${PRODUCT_ARRAY[@]}"; do - git config -f .gitmodules "submodule.build/chef/$PRD.branch" "$BRANCH" - # Bundle repo does not use the same naming scheme as the others - if [[ $PRD == *"_bundle"* ]]; then - repo="chef-google" - else - repo="chef-google-$PRD" - fi - git config -f .gitmodules "submodule.build/chef/$PRD.url" "git@github.com:$GH_USERNAME/$repo.git" - git submodule sync "build/chef/$PRD" - ssh-agent bash -c "ssh-add ~/github_private_key; git submodule update --remote --init build/chef/$PRD" - git add "build/chef/$PRD" -done if [ "$TERRAFORM_ENABLED" = "true" ]; then - git config -f .gitmodules submodule.build/terraform.branch "$BRANCH" - git config -f .gitmodules submodule.build/terraform.url "git@github.com:$GH_USERNAME/terraform-provider-google.git" - git submodule sync build/terraform - ssh-agent bash -c "ssh-add ~/github_private_key; git submodule update --remote --init build/terraform" - git add build/terraform + IFS="," read -ra TERRAFORM_VERSIONS <<< "$TERRAFORM_VERSIONS" + for VERSION 
in "${TERRAFORM_VERSIONS[@]}"; do + IFS=":" read -ra TERRAFORM_DATA <<< "$VERSION" + PROVIDER_NAME="${TERRAFORM_DATA[0]}" + SUBMODULE_DIR="${TERRAFORM_DATA[1]}" + + git config -f .gitmodules "submodule.build/$SUBMODULE_DIR.branch" "$BRANCH" + git config -f .gitmodules "submodule.build/$SUBMODULE_DIR.url" "git@github.com:$GH_USERNAME/$PROVIDER_NAME.git" + git submodule sync "build/$SUBMODULE_DIR" + ssh-agent bash -c "ssh-add ~/github_private_key; git submodule update --remote --init build/$SUBMODULE_DIR" + git add "build/$SUBMODULE_DIR" + done fi if [ "$ANSIBLE_ENABLED" = "true" ]; then diff --git a/.ci/magic-modules/point-to-submodules.yml b/.ci/magic-modules/point-to-submodules.yml index f91ff55283b8..f76f46400fdb 100644 --- a/.ci/magic-modules/point-to-submodules.yml +++ b/.ci/magic-modules/point-to-submodules.yml @@ -23,8 +23,7 @@ params: GH_USERNAME: "" CREDS: "" TERRAFORM_ENABLED: false + TERRAFORM_VERSIONS: "" ANSIBLE_ENABLED: false INSPEC_ENABLED: false - PUPPET_MODULES: "" - CHEF_MODULES: "" diff --git a/.ci/unit-tests/chef.yml b/.ci/unit-tests/chef.yml deleted file mode 100644 index cc4b5dd9753e..000000000000 --- a/.ci/unit-tests/chef.yml +++ /dev/null @@ -1,14 +0,0 @@ -platform: linux -inputs: - - name: magic-modules -image_resource: - type: docker-image - source: - repository: ruby - tag: '2.5' -run: - path: magic-modules/.ci/unit-tests/puppet-chef.sh -params: - PRODUCT: "" - PROVIDER: chef - EXCLUDE_PATTERN: "" diff --git a/.ci/unit-tests/puppet-chef.sh b/.ci/unit-tests/puppet-chef.sh deleted file mode 100755 index 8e5a0d4002fa..000000000000 --- a/.ci/unit-tests/puppet-chef.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -set -e -set -x - -pushd "magic-modules/build/$PROVIDER/$PRODUCT" -bundle install - -if [ $PROVIDER = "chef" ]; then - # TODO: https://github.com/GoogleCloudPlatform/magic-modules/issues/236 - # Re-enable chef tests by deleting this if block once the tests are fixed. - echo "Skipping tests... 
See issue #236" -elif [ -z "$EXCLUDE_PATTERN" ]; then - DISABLE_COVERAGE=true bundle exec parallel_rspec spec/ -else - # parallel_rspec doesn't support --exclude_pattern - IFS="," read -ra excluded <<< "$EXCLUDE_PATTERN" - filtered=$(find spec -name '*_spec.rb' $(printf "! -wholename %s " ${excluded[@]})) - DISABLE_COVERAGE=true bundle exec parallel_rspec ${filtered[@]} -fi - -popd diff --git a/.ci/unit-tests/puppet.yml b/.ci/unit-tests/puppet.yml deleted file mode 100644 index d628e72fb032..000000000000 --- a/.ci/unit-tests/puppet.yml +++ /dev/null @@ -1,14 +0,0 @@ -platform: linux -inputs: - - name: magic-modules -image_resource: - type: docker-image - source: - repository: ruby - tag: '2.5' -run: - path: magic-modules/.ci/unit-tests/puppet-chef.sh -params: - PRODUCT: "" - PROVIDER: puppet - EXCLUDE_PATTERN: "" diff --git a/.ci/unit-tests/run.sh b/.ci/unit-tests/run.sh index dbe47f3ea409..d58fd5f866c3 100755 --- a/.ci/unit-tests/run.sh +++ b/.ci/unit-tests/run.sh @@ -9,8 +9,8 @@ set -x # Create GOPATH structure mkdir -p "${GOPATH}/src/github.com/terraform-providers" -ln -s "${PWD}/$1" "${GOPATH}/src/github.com/terraform-providers/terraform-provider-google" +ln -s "${PWD}/magic-modules/build/$SHORT_NAME" "${GOPATH}/src/github.com/terraform-providers/$PROVIDER_NAME" -cd "${GOPATH}/src/github.com/terraform-providers/terraform-provider-google" +cd "${GOPATH}/src/github.com/terraform-providers/$PROVIDER_NAME" -go test -v ./google -parallel 16 -run '^Test' -timeout 1m +go test -v ./$TEST_DIR -parallel 16 -run '^Test' -timeout 1m diff --git a/.ci/unit-tests/task.yml b/.ci/unit-tests/task.yml index 5ee190c9ceab..67e73810c91a 100644 --- a/.ci/unit-tests/task.yml +++ b/.ci/unit-tests/task.yml @@ -8,5 +8,7 @@ image_resource: tag: '1.9.3' run: path: magic-modules/.ci/unit-tests/run.sh - args: - - magic-modules/build/terraform/ +params: + PROVIDER_NAME: "" + SHORT_NAME: "" + TEST_DIR: "" diff --git a/.ci/vars.tmpl b/.ci/vars.tmpl index f5aaa68e4548..5ce55e16cb22 100644 --- 
a/.ci/vars.tmpl +++ b/.ci/vars.tmpl @@ -1,16 +1,35 @@ {% set chef_modules = ['_bundle', 'auth', 'compute', 'sql', 'storage', 'spanner', 'container', 'dns', 'iam'] %} {% set puppet_modules = ['_bundle', 'auth', 'bigquery', 'compute', 'sql', 'storage', 'spanner', 'container', 'dns', 'pubsub', 'resourcemanager'] %} {% set puppet_no_release = ['bigquery'] %} +{% set terraform_v = { + 'ga': { + 'provider_name': 'terraform-provider-google', + 'short_name': 'terraform', + 'test_dir': 'google' + }, + 'beta': { + 'provider_name': 'terraform-provider-google-beta', + 'short_name': 'terraform-beta', + 'test_dir': 'google-beta' + } + } +%} {% macro names_as_list(repo, names) -%} {% for name in names %} build/{{repo}}/{{name}} {%- endfor %} {% endmacro -%} +{% macro build_folder(names) -%} +{% for name in names %} +build/{{name}} +{%- endfor %} +{% endmacro -%} {% set puppet_submodules = names_as_list('puppet', puppet_modules).split() %} {% set chef_submodules = names_as_list('chef', chef_modules).split() %} +{% set terraform_submodules = build_folder(terraform_v.values()|map(attribute='short_name')).split() %} {% - set all_submodules = puppet_submodules + chef_submodules + - (['build/terraform'] + ['build/ansible'] + ['build/inspec']) + set all_submodules = + (terraform_submodules + ['build/ansible'] + ['build/inspec']) %} {% set all_submodules_yaml_format = '[' + ','.join(all_submodules) + ']' %} {% set chef_test_excludes = { @@ -23,17 +42,9 @@ build/{{repo}}/{{name}} ] } %} -{% set puppet_test_excludes = { - 'compute': [ - 'spec/gcompute_instance_group_manager_provider_spec.rb', - 'spec/gcompute_instance_provider_spec.rb', - 'spec/gcompute_instance_template_provider_spec.rb', - 'spec/gcompute_target_https_proxy_provider_spec.rb', - 'spec/gcompute_target_ssl_proxy_provider_spec.rb', - 'spec/gcompute_vpn_tunnel_provider_spec.rb', - 'spec/puppetlint_spec.rb'], - 'bigquery': [ - 'spec/gbigquery_table_provider_spec.rb' - ] - } -%} +{% macro serialize_terraform_properties(objs) -%} 
+{% for obj in objs %} +{{obj.provider_name}}:{{obj.short_name}} +{%- endfor %} +{% endmacro -%} +{% set terraform_properties_serialized = serialize_terraform_properties(terraform_v.values()).split() %} diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index bfab3ccbe1b1..780306080bde 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,6 +1,5 @@ - + + ----------------------------------------------------------------- # [all] ## [terraform] -## [puppet] -### [puppet-bigquery] -### [puppet-compute] -### [puppet-container] -### [puppet-dns] -### [puppet-logging] -### [puppet-pubsub] -### [puppet-resourcemanager] -### [puppet-sql] -### [puppet-storage] -## [chef] -### [chef-compute] -### [chef-container] -### [chef-dns] -### [chef-logging] -### [chef-spanner] -### [chef-sql] -### [chef-storage] +### [terraform-beta] ## [ansible] ## [inspec] diff --git a/.gitmodules b/.gitmodules index a0c2eed20449..1daee31cfa6d 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,69 +1,3 @@ -[submodule "build/puppet/dns"] - path = build/puppet/dns - url = git@github.com:GoogleCloudPlatform/puppet-google-dns -[submodule "build/puppet/auth"] - path = build/puppet/auth - url = git@github.com:GoogleCloudPlatform/puppet-google-auth -[submodule "build/puppet/compute"] - path = build/puppet/compute - url = git@github.com:GoogleCloudPlatform/puppet-google-compute -[submodule "build/chef/dns"] - path = build/chef/dns - url = git@github.com:GoogleCloudPlatform/chef-google-dns -[submodule "build/chef/compute"] - path = build/chef/compute - url = git@github.com:GoogleCloudPlatform/chef-google-compute -[submodule "build/puppet/storage"] - path = build/puppet/storage - url = git@github.com:GoogleCloudPlatform/puppet-google-storage -[submodule "build/puppet/sql"] - path = build/puppet/sql - url = git@github.com:GoogleCloudPlatform/puppet-google-sql -[submodule "build/puppet/container"] - path = build/puppet/container - url = 
git@github.com:GoogleCloudPlatform/puppet-google-container -[submodule "build/puppet/_bundle"] - path = build/puppet/_bundle - url = git@github.com:GoogleCloudPlatform/puppet-google -[submodule "build/chef/auth"] - path = build/chef/auth - url = git@github.com:GoogleCloudPlatform/chef-google-auth -[submodule "build/chef/storage"] - path = build/chef/storage - url = git@github.com:GoogleCloudPlatform/chef-google-storage -[submodule "build/chef/container"] - path = build/chef/container - url = git@github.com:GoogleCloudPlatform/chef-google-container -[submodule "build/chef/sql"] - path = build/chef/sql - url = git@github.com:GoogleCloudPlatform/chef-google-sql -[submodule "build/puppet/logging"] - path = build/puppet/logging - url = git@github.com:GoogleCloudPlatform/puppet-google-logging -[submodule "build/chef/_bundle"] - path = build/chef/_bundle - url = git@github.com:GoogleCloudPlatform/chef-google -[submodule "build/puppet/pubsub"] - path = build/puppet/pubsub - url = git@github.com:GoogleCloudPlatform/puppet-google-pubsub -[submodule "build/chef/pubsub"] - path = build/chef/pubsub - url = git@github.com:GoogleCloudPlatform/chef-google-pubsub -[submodule "build/puppet/spanner"] - path = build/puppet/spanner - url = git@github.com:GoogleCloudPlatform/puppet-google-spanner -[submodule "build/puppet/resourcemanager"] - path = build/puppet/resourcemanager - url = git@github.com:GoogleCloudPlatform/puppet-google-resourcemanager -[submodule "build/chef/resourcemanager"] - path = build/chef/resourcemanager - url = git@github.com:GoogleCloudPlatform/chef-google-resourcemanager -[submodule "build/puppet/iam"] - path = build/puppet/iam - url = git@github.com:GoogleCloudPlatform/puppet-google-iam -[submodule "build/chef/logging"] - path = build/chef/logging - url = git@github.com:GoogleCloudPlatform/chef-google-logging [submodule "build/ansible"] path = build/ansible url = git@github.com:modular-magician/ansible @@ -72,15 +6,10 @@ path = build/terraform url = 
git@github.com:terraform-providers/terraform-provider-google.git branch = 2.0.0 -[submodule "build/puppet/bigquery"] - path = build/puppet/bigquery - url = git@github.com:GoogleCloudPlatform/puppet-google-bigquery.git -[submodule "build/chef/spanner"] - path = build/chef/spanner - url = git@github.com:GoogleCloudPlatform/chef-google-spanner.git -[submodule "build/chef/iam"] - path = build/chef/iam - url = git@github.com:GoogleCloudPlatform/chef-google-iam.git +[submodule "build/terraform-beta"] + path = build/terraform-beta + url = git@github.com:terraform-providers/terraform-provider-google-beta.git + branch = 2.0.0 [submodule "build/inspec"] path = build/inspec url = git@github.com:modular-magician/inspec-gcp.git diff --git a/Gemfile b/Gemfile index 104dab0840e5..4b645e2e35c3 100644 --- a/Gemfile +++ b/Gemfile @@ -1,5 +1,6 @@ source 'https://rubygems.org' +gem 'activesupport' gem 'binding_of_caller' gem 'rake' diff --git a/Gemfile.lock b/Gemfile.lock index e6a8ae0f02f0..bd0b1efcdf7b 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,14 +1,23 @@ GEM remote: https://rubygems.org/ specs: + activesupport (5.2.1) + concurrent-ruby (~> 1.0, >= 1.0.2) + i18n (>= 0.7, < 2) + minitest (~> 5.1) + tzinfo (~> 1.1) ast (2.4.0) binding_of_caller (0.8.0) debug_inspector (>= 0.0.1) + concurrent-ruby (1.0.5) debug_inspector (0.0.3) diff-lcs (1.3) docile (1.3.1) + i18n (1.1.1) + concurrent-ruby (~> 1.0) json (2.1.0) metaclass (0.0.4) + minitest (5.11.3) mocha (1.3.0) metaclass (~> 0.0.1) parallel (1.12.1) @@ -45,12 +54,16 @@ GEM json (>= 1.8, < 3) simplecov-html (~> 0.10.0) simplecov-html (0.10.2) + thread_safe (0.3.6) + tzinfo (1.2.5) + thread_safe (~> 0.1) unicode-display_width (1.4.0) PLATFORMS ruby DEPENDENCIES + activesupport binding_of_caller mocha (~> 1.3.0) parallel_tests @@ -60,4 +73,4 @@ DEPENDENCIES simplecov BUNDLED WITH - 1.16.2 + 1.16.5 diff --git a/api/resource.rb b/api/resource.rb index 6ce933b2a936..33e4a8360b59 100644 --- a/api/resource.rb +++ b/api/resource.rb 
@@ -42,6 +42,10 @@ module Properties # GET/DELETE requests only. In particular, this is often used # to add query parameters. attr_reader :self_link_query + # This is the type of response from the collection URL. It contains + # the name of the list of items within the json, as well as the + # type that this list should be. This is of type Api::Resource::ResponseList + attr_reader :collection_url_response # This is an array with items that uniquely identify the resource. # This is useful in case an API returns a list result and we need # to fetch the particular resource we're interested in from that diff --git a/api/type.rb b/api/type.rb index 8bebe90df7be..0c13a411a51d 100644 --- a/api/type.rb +++ b/api/type.rb @@ -310,6 +310,12 @@ def requires end [property_file] end + + def exclude_if_not_in_version(version) + super + @item_type.exclude_if_not_in_version(version) \ + if @item_type.is_a? NestedObject + end end # Represents an enum, and store is valid values @@ -499,6 +505,11 @@ def all_properties def properties @properties.reject(&:exclude) end + + def exclude_if_not_in_version(version) + super + @properties.each { |p| p.exclude_if_not_in_version(version) } + end end # Represents an array of name=value pairs, and stores its items' type diff --git a/build/ansible b/build/ansible index f83c565380f4..d7ae9f0c705f 160000 --- a/build/ansible +++ b/build/ansible @@ -1 +1 @@ -Subproject commit f83c565380f420564e797d8028fe493223b80f39 +Subproject commit d7ae9f0c705fc34f8b74ca53db2e77a0637b3930 diff --git a/build/chef/_bundle b/build/chef/_bundle deleted file mode 160000 index c66e5f461d0a..000000000000 --- a/build/chef/_bundle +++ /dev/null @@ -1 +0,0 @@ -Subproject commit c66e5f461d0acb3516fddc85e20a47bd4166d514 diff --git a/build/chef/auth b/build/chef/auth deleted file mode 160000 index db0963b09681..000000000000 --- a/build/chef/auth +++ /dev/null @@ -1 +0,0 @@ -Subproject commit db0963b09681cc0c5db9211075405c11f37e28d6 diff --git a/build/chef/compute 
b/build/chef/compute deleted file mode 160000 index 8105b0179d4a..000000000000 --- a/build/chef/compute +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 8105b0179d4aa34d7e6bcde46897951805fbae62 diff --git a/build/chef/container b/build/chef/container deleted file mode 160000 index 8bca9e6a9ad1..000000000000 --- a/build/chef/container +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 8bca9e6a9ad1a5964c9dd4158d9d86be88bac8d1 diff --git a/build/chef/dns b/build/chef/dns deleted file mode 160000 index 6d47e7dca4a3..000000000000 --- a/build/chef/dns +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 6d47e7dca4a3bd44a4d6072534d10cb3555fb43a diff --git a/build/chef/iam b/build/chef/iam deleted file mode 160000 index b3482ef20a59..000000000000 --- a/build/chef/iam +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b3482ef20a59ce7ed18550c24f2ada4894c8ef25 diff --git a/build/chef/logging b/build/chef/logging deleted file mode 160000 index f286b6943ae2..000000000000 --- a/build/chef/logging +++ /dev/null @@ -1 +0,0 @@ -Subproject commit f286b6943ae2f2559dd7d1cf92f8b2167eafd03c diff --git a/build/chef/pubsub b/build/chef/pubsub deleted file mode 160000 index 5d0ed5006fad..000000000000 --- a/build/chef/pubsub +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 5d0ed5006fada25597e08081d36dde1e25815c51 diff --git a/build/chef/resourcemanager b/build/chef/resourcemanager deleted file mode 160000 index bef20e8cbf54..000000000000 --- a/build/chef/resourcemanager +++ /dev/null @@ -1 +0,0 @@ -Subproject commit bef20e8cbf54111a7f9bdf047f8f3e7c197d28fe diff --git a/build/chef/spanner b/build/chef/spanner deleted file mode 160000 index 9b4772e37e9d..000000000000 --- a/build/chef/spanner +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 9b4772e37e9d4368e1ceb722739fc9569b9468a4 diff --git a/build/chef/sql b/build/chef/sql deleted file mode 160000 index 185b30def30c..000000000000 --- a/build/chef/sql +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 185b30def30cdd99442ae6b84127e638c8b12931 diff --git 
a/build/chef/storage b/build/chef/storage deleted file mode 160000 index b06ec3d89a51..000000000000 --- a/build/chef/storage +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b06ec3d89a5184ee6a8a34ed50bb737f52cc3e2c diff --git a/build/inspec b/build/inspec index f359ebbf01dc..4ccf92e246d7 160000 --- a/build/inspec +++ b/build/inspec @@ -1 +1 @@ -Subproject commit f359ebbf01dc1294dc5338ad2dc380a888a14563 +Subproject commit 4ccf92e246d70155e085386808a36ba440e2a28f diff --git a/build/puppet/_bundle b/build/puppet/_bundle deleted file mode 160000 index 5253e2dee991..000000000000 --- a/build/puppet/_bundle +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 5253e2dee9911aa3d101bd701fca292f7c884388 diff --git a/build/puppet/auth b/build/puppet/auth deleted file mode 160000 index e81a7ed65410..000000000000 --- a/build/puppet/auth +++ /dev/null @@ -1 +0,0 @@ -Subproject commit e81a7ed6541017b56805b1720ad4c3d15f185dcb diff --git a/build/puppet/bigquery b/build/puppet/bigquery deleted file mode 160000 index b09e4449241e..000000000000 --- a/build/puppet/bigquery +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b09e4449241e5d7292206d5110f7b9377eca1594 diff --git a/build/puppet/compute b/build/puppet/compute deleted file mode 160000 index 715e583ddc5a..000000000000 --- a/build/puppet/compute +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 715e583ddc5ab56babb2f0f291de06cc40a036ff diff --git a/build/puppet/container b/build/puppet/container deleted file mode 160000 index 2fc0f96ca642..000000000000 --- a/build/puppet/container +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2fc0f96ca6424b106bfb26946f3f39036947c80c diff --git a/build/puppet/dns b/build/puppet/dns deleted file mode 160000 index f77dcd63f6f6..000000000000 --- a/build/puppet/dns +++ /dev/null @@ -1 +0,0 @@ -Subproject commit f77dcd63f6f6989938a9f15ca52700c361f6e9d9 diff --git a/build/puppet/iam b/build/puppet/iam deleted file mode 160000 index fb0833158347..000000000000 --- a/build/puppet/iam +++ /dev/null @@ -1 +0,0 @@ 
-Subproject commit fb083315834774c8af4b37ed24ee94653179901a diff --git a/build/puppet/logging b/build/puppet/logging deleted file mode 160000 index 70bf6882c8d9..000000000000 --- a/build/puppet/logging +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 70bf6882c8d9ab4eba59006942838751f1d5443a diff --git a/build/puppet/pubsub b/build/puppet/pubsub deleted file mode 160000 index 3e8cad955283..000000000000 --- a/build/puppet/pubsub +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 3e8cad955283c419f955a350648129ff38e0aa2e diff --git a/build/puppet/resourcemanager b/build/puppet/resourcemanager deleted file mode 160000 index f2d8df9c7327..000000000000 --- a/build/puppet/resourcemanager +++ /dev/null @@ -1 +0,0 @@ -Subproject commit f2d8df9c73273492692c46af72a44850edbf4c88 diff --git a/build/puppet/spanner b/build/puppet/spanner deleted file mode 160000 index 91bf28d8aadd..000000000000 --- a/build/puppet/spanner +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 91bf28d8aadd64a546518e2bc9df002f8988e339 diff --git a/build/puppet/sql b/build/puppet/sql deleted file mode 160000 index 056fab949365..000000000000 --- a/build/puppet/sql +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 056fab94936597e0f002b3ae8cb6b92c280de703 diff --git a/build/puppet/storage b/build/puppet/storage deleted file mode 160000 index 3dcf41e31955..000000000000 --- a/build/puppet/storage +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 3dcf41e319558a971f0f3a7c1b87b670ed0bd3fa diff --git a/build/terraform b/build/terraform index d622ec466e68..e394da6e838f 160000 --- a/build/terraform +++ b/build/terraform @@ -1 +1 @@ -Subproject commit d622ec466e688b09c601d34f5ce1c7e5796dadf3 +Subproject commit e394da6e838faa1bfb21f9d7e8d6e0530a5b7454 diff --git a/build/terraform-beta b/build/terraform-beta new file mode 160000 index 000000000000..ed7f4780cc0e --- /dev/null +++ b/build/terraform-beta @@ -0,0 +1 @@ +Subproject commit ed7f4780cc0e6db20fb4c4c31b76bf65e9b2e937 diff --git a/products/compute/ansible.yaml 
b/products/compute/ansible.yaml index 57a8a2c59958..fea8499e16a2 100644 --- a/products/compute/ansible.yaml +++ b/products/compute/ansible.yaml @@ -132,6 +132,10 @@ overrides: !ruby/object:Provider::ResourceOverrides Disk: !ruby/object:Provider::Ansible::ResourceOverride editable: false properties: + sourceSnapshot: !ruby/object:Provider::Ansible::PropertyOverride + description: | + The source snapshot used to create this disk. You can provide this as + a partial or full URL to the resource. labels: !ruby/object:Provider::Ansible::PropertyOverride version_added: '2.7' type: !ruby/object:Provider::Ansible::PropertyOverride @@ -276,6 +280,11 @@ overrides: !ruby/object:Provider::ResourceOverrides version_added: '2.8' RegionDisk: !ruby/object:Provider::Ansible::ResourceOverride version_added: '2.8' + properties: + sourceSnapshot: !ruby/object:Provider::Ansible::PropertyOverride + description: | + The source snapshot used to create this disk. You can provide this as + a partial or full URL to the resource. 
Route: !ruby/object:Provider::Ansible::ResourceOverride properties: description: !ruby/object:Provider::Ansible::PropertyOverride diff --git a/products/compute/api.yaml b/products/compute/api.yaml index 337381d54768..15dc95438aa1 100644 --- a/products/compute/api.yaml +++ b/products/compute/api.yaml @@ -31,6 +31,9 @@ objects: name: 'Address' kind: 'compute#address' base_url: projects/{{project}}/regions/{{region}}/addresses + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#addressList' + items: 'items' exports: - !ruby/object:Api::Type::FetchedExternal name: address @@ -151,6 +154,9 @@ objects: kind: 'compute#autoscaler' base_url: projects/{{project}}/zones/{{zone}}/autoscalers update_url: projects/{{project}}/zones/{{zone}}/autoscalers?autoscaler={{name}} + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#autoscalerList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: selfLink @@ -187,6 +193,9 @@ objects: name: 'BackendBucket' kind: 'compute#backendBucket' base_url: projects/{{project}}/global/backendBuckets + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#backendBucketLists' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -240,6 +249,9 @@ objects: name: 'BackendService' kind: 'compute#backendService' base_url: projects/{{project}}/global/backendServices + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#backendServicesList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -539,6 +551,9 @@ objects: name: 'DiskType' kind: 'compute#diskType' base_url: projects/{{project}}/zones/{{zone}}/diskTypes + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#diskTypeList' + items: 'items' # TODO(nelsonjr): Search all documentation for references of using URL (like # the description below) and replace with the proper reference 
to the # corresponding type. @@ -572,6 +587,9 @@ objects: kind: 'compute#disk' input: true base_url: projects/{{project}}/zones/{{zone}}/disks + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#diskList' + items: 'items' exports: - 'name' - !ruby/object:Api::Type::SelfLink @@ -627,6 +645,12 @@ objects: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. output: true + - !ruby/object:Api::Type::String + # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules + name: 'kmsKeyName' + min_version: beta + description: | + The name of the encryption key that is stored in Google Cloud KMS. input: true - !ruby/object:Api::Type::String name: 'sourceImageId' @@ -681,6 +705,9 @@ objects: name: 'Firewall' kind: 'compute#firewall' base_url: projects/{{project}}/global/firewalls + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#firewallList' + items: 'items' update_verb: :PATCH exports: - !ruby/object:Api::Type::SelfLink @@ -917,6 +944,9 @@ objects: name: 'ForwardingRule' kind: 'compute#forwardingRule' base_url: projects/{{project}}/regions/{{region}}/forwardingRules + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#forwardingRuleList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -1010,6 +1040,9 @@ objects: name: 'GlobalAddress' kind: 'compute#address' base_url: projects/{{project}}/global/addresses + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#addressList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -1134,6 +1167,9 @@ objects: name: 'GlobalForwardingRule' kind: 'compute#forwardingRule' base_url: projects/{{project}}/global/forwardingRules + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#forwardingRulesList' + items: 'items' description: | Represents a 
GlobalForwardingRule resource. Global forwarding rules are used to forward traffic to the correct load balancer for HTTP load @@ -1168,6 +1204,9 @@ objects: name: 'HttpHealthCheck' kind: 'compute#httpHealthCheck' base_url: projects/{{project}}/global/httpHealthChecks + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#httpHealthCheckList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -1249,6 +1288,9 @@ objects: name: 'HttpsHealthCheck' kind: 'compute#httpsHealthCheck' base_url: projects/{{project}}/global/httpsHealthChecks + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#httpsHealthCheckList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -1329,6 +1371,9 @@ objects: name: 'HealthCheck' kind: 'compute#healthCheck' base_url: projects/{{project}}/global/healthChecks + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#healthCheckList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -1449,6 +1494,9 @@ objects: kind: 'compute#instanceTemplate' input: true base_url: projects/{{project}}/global/instanceTemplates + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#instanceTemplatesListResponse' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -1530,6 +1578,9 @@ objects: name: 'License' kind: 'compute#license' base_url: /projects/{{project}}/global/licenses + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#TODO???' + items: 'items' readonly: true exports: - !ruby/object:Api::Type::SelfLink @@ -1555,6 +1606,9 @@ objects: name: 'Image' kind: 'compute#image' base_url: projects/{{project}}/global/images + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#imageList' + items: 'items' description: | Represents an Image resource. 
@@ -1702,6 +1756,12 @@ objects: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. output: true + - !ruby/object:Api::Type::String + # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules + name: 'kmsKeyName' + min_version: beta + description: | + The name of the encryption key that is stored in Google Cloud KMS. # TODO(alexstephen): Change to ResourceRef with array support - !ruby/object:Api::Type::Array name: 'licenses' @@ -1768,6 +1828,12 @@ objects: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. output: true + - !ruby/object:Api::Type::String + # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules + name: 'kmsKeyName' + min_version: beta + description: | + The name of the encryption key that is stored in Google Cloud KMS. - !ruby/object:Api::Type::String name: 'sourceDiskId' description: | @@ -1786,6 +1852,9 @@ objects: name: 'Instance' kind: 'compute#instance' base_url: projects/{{project}}/zones/{{zone}}/instances + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#instanceList' + items: 'items' input: true exports: - !ruby/object:Api::Type::SelfLink @@ -1892,6 +1961,9 @@ objects: name: 'InstanceGroup' kind: 'compute#instanceGroup' base_url: projects/{{project}}/zones/{{zone}}/instanceGroups + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#instanceGroupList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -1990,6 +2062,9 @@ objects: name: 'InstanceGroupManager' kind: 'compute#instanceGroupManager' base_url: projects/{{project}}/zones/{{zone}}/instanceGroupManagers + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#instanceGroupManagerList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -2173,6 +2248,9 @@ objects: name: 'InterconnectAttachment' 
kind: 'compute#interconnectAttachment' base_url: 'projects/{{project}}/regions/{{region}}/interconnectAttachments' + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#interconnectAttachementList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -2273,6 +2351,9 @@ objects: name: 'MachineType' kind: 'compute#machineType' base_url: projects/{{project}}/zones/{{zone}}/machineTypes + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#machineTypeList' + items: 'items' exports: - 'name' - !ruby/object:Api::Type::SelfLink @@ -2383,6 +2464,9 @@ objects: name: 'Network' kind: 'compute#network' base_url: projects/{{project}}/global/networks + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#networkList' + items: 'items' update_verb: :PATCH exports: - !ruby/object:Api::Type::SelfLink @@ -2495,6 +2579,9 @@ objects: name: 'Region' kind: 'compute#region' base_url: projects/{{project}}/regions + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#regionList' + items: 'items' exports: - name - !ruby/object:Api::Type::SelfLink @@ -2573,6 +2660,9 @@ objects: kind: 'compute#autoscaler' base_url: projects/{{project}}/regions/{{region}}/autoscalers update_url: projects/{{project}}/regions/{{region}}/autoscalers?autoscaler={{name}} + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#autoscalerList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: selfLink @@ -2610,6 +2700,9 @@ objects: name: 'RegionDiskType' kind: 'compute#diskType' base_url: projects/{{project}}/regions/{{region}}/diskTypes + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#diskTypeList' + items: 'items' description: | Represents a regional DiskType resource. A DiskType resource represents the type of disk to use, such as a pd-ssd or pd-standard. 
To reference a @@ -2632,6 +2725,9 @@ objects: kind: 'compute#disk' input: true base_url: projects/{{project}}/regions/{{region}}/disks + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#diskList' + items: 'items' exports: - 'name' - !ruby/object:Api::Type::SelfLink @@ -2698,6 +2794,9 @@ objects: name: 'Route' kind: 'compute#route' base_url: projects/{{project}}/global/routes + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#routeList' + items: 'items' input: true exports: - !ruby/object:Api::Type::SelfLink @@ -2822,6 +2921,9 @@ objects: name: 'Router' kind: 'compute#router' base_url: projects/{{project}}/regions/{{region}}/routers + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#routerList' + items: 'items' # Since Terraform has separate resources for router, router interface, and # router peer, calling PUT on the router will delete the interface and peer. # Use patch instead. @@ -2945,6 +3047,9 @@ objects: name: 'Snapshot' kind: 'compute#snapshot' base_url: projects/{{project}}/global/snapshots + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#snapshotList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -2995,6 +3100,12 @@ objects: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. output: true + - !ruby/object:Api::Type::String + # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules + name: 'kmsKeyName' + min_version: beta + description: | + The name of the encryption key that is stored in Google Cloud KMS. input: true - !ruby/object:Api::Type::NestedObject name: 'sourceDiskEncryptionKey' @@ -3014,6 +3125,12 @@ objects: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. 
output: true + - !ruby/object:Api::Type::String + # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules + name: 'kmsKeyName' + min_version: beta + description: | + The name of the encryption key that is stored in Google Cloud KMS. input: true properties: - !ruby/object:Api::Type::Time @@ -3073,6 +3190,9 @@ objects: name: 'SslCertificate' kind: 'compute#sslCertificate' base_url: projects/{{project}}/global/sslCertificates + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#sslCertificateList' + items: 'items' references: !ruby/object:Api::Resource::ReferenceLinks guides: 'Official Documentation': 'https://cloud.google.com/load-balancing/docs/ssl-certificates' @@ -3125,6 +3245,9 @@ objects: # TODO(https://github.com/GoogleCloudPlatform/magic-modules/issues/173): Enable kind: 'compute#sslPolicy' base_url: projects/{{project}}/global/sslPolicies + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#sslPoliciesList' + items: 'items' update_verb: :PATCH exports: - !ruby/object:Api::Type::SelfLink @@ -3224,6 +3347,9 @@ objects: name: 'Subnetwork' kind: 'compute#subnetwork' base_url: projects/{{project}}/regions/{{region}}/subnetworks + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#subnetworkList' + items: 'items' input: true exports: - !ruby/object:Api::Type::SelfLink @@ -3379,6 +3505,9 @@ objects: name: 'TargetHttpProxy' kind: 'compute#targetHttpProxy' base_url: projects/{{project}}/global/targetHttpProxies + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#targetHttpProxyList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -3429,6 +3558,9 @@ objects: name: 'TargetHttpsProxy' kind: 'compute#targetHttpsProxy' base_url: projects/{{project}}/global/targetHttpsProxies + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#targetHttpProxyList' + items: 'items' 
exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -3524,6 +3656,9 @@ objects: name: 'TargetPool' kind: 'compute#targetPool' base_url: projects/{{project}}/regions/{{region}}/targetPools + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#targetPoolList' + items: 'items' description: 'Represents a TargetPool resource, used for Load Balancing.' exports: - !ruby/object:Api::Type::SelfLink @@ -3648,6 +3783,9 @@ objects: name: 'TargetSslProxy' kind: 'compute#targetSslProxy' base_url: projects/{{project}}/global/targetSslProxies + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#targetSslProxyList' + items: 'items' input: true exports: - !ruby/object:Api::Type::SelfLink @@ -3737,6 +3875,9 @@ objects: name: 'TargetTcpProxy' kind: 'compute#targetTcpProxy' base_url: projects/{{project}}/global/targetTcpProxies + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#targetTcpProxyList' + items: 'items' input: true exports: - !ruby/object:Api::Type::SelfLink @@ -3799,6 +3940,9 @@ objects: name: 'TargetVpnGateway' kind: 'compute#targetVpnGateway' base_url: projects/{{project}}/regions/{{region}}/targetVpnGateways + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#targetVpnGatewayList' + items: 'items' input: true exports: - !ruby/object:Api::Type::SelfLink @@ -3874,6 +4018,9 @@ objects: name: 'UrlMap' kind: 'compute#urlMap' base_url: projects/{{project}}/global/urlMaps + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#urlMapList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -4022,6 +4169,9 @@ objects: description: 'VPN tunnel resource.' 
input: true base_url: projects/{{project}}/regions/{{region}}/vpnTunnels + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#vpnTunnelList' + items: 'items' exports: - !ruby/object:Api::Type::SelfLink name: 'selfLink' @@ -4137,6 +4287,9 @@ objects: name: 'Zone' kind: 'compute#zone' base_url: projects/{{project}}/zones + collection_url_response: !ruby/object:Api::Resource::ResponseList + kind: 'compute#zoneList' + items: 'items' exports: - name - !ruby/object:Api::Type::SelfLink diff --git a/products/compute/disk_parameters.yaml b/products/compute/disk_parameters.yaml index 7e7bdc39a31b..3b3fe3190b90 100644 --- a/products/compute/disk_parameters.yaml +++ b/products/compute/disk_parameters.yaml @@ -37,6 +37,12 @@ The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. output: true + - !ruby/object:Api::Type::String + # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules + name: 'kmsKeyName' + min_version: beta + description: | + The name of the encryption key that is stored in Google Cloud KMS. input: true - !ruby/object:Api::Type::ResourceRef name: 'sourceSnapshot' @@ -62,6 +68,12 @@ description: | Specifies a 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to either encrypt or decrypt this resource. + - !ruby/object:Api::Type::String + # TODO(chrisst) Change to ResourceRef once KMS is in Magic Modules + name: 'kmsKeyName' + min_version: beta + description: | + The name of the encryption key that is stored in Google Cloud KMS. 
- !ruby/object:Api::Type::String name: 'sha256' description: | diff --git a/products/compute/terraform.yaml b/products/compute/terraform.yaml index ef7eba64e461..f92a9957394c 100644 --- a/products/compute/terraform.yaml +++ b/products/compute/terraform.yaml @@ -16,23 +16,26 @@ overrides: !ruby/object:Provider::ResourceOverrides Address: !ruby/object:Provider::Terraform::ResourceOverride id_format: "{{project}}/{{region}}/{{name}}" example: - - !ruby/object:Provider::Terraform::Examples - name: "address_basic" - primary_resource_id: "ip_address" - vars: - address_name: "my-address" - - !ruby/object:Provider::Terraform::Examples - name: "address_with_subnetwork" - primary_resource_id: "internal_with_subnet_and_address" - vars: - address_name: "my-internal-address" - network_name: "my-network" - subnetwork_name: "my-subnet" - # TODO(rileykarson): Remove this example when instance is supported - - !ruby/object:Provider::Terraform::Examples - name: "instance_with_ip" - primary_resource_id: "static" - vars: + - !ruby/object:Provider::Terraform::Examples + name: "address_basic" + primary_resource_id: "ip_address" + version: <%= _version_name %> + vars: + address_name: "my-address" + - !ruby/object:Provider::Terraform::Examples + name: "address_with_subnetwork" + primary_resource_id: "internal_with_subnet_and_address" + version: <%= _version_name %> + vars: + address_name: "my-internal-address" + network_name: "my-network" + subnetwork_name: "my-subnet" + # TODO(rileykarson): Remove this example when instance is supported + - !ruby/object:Provider::Terraform::Examples + name: "instance_with_ip" + primary_resource_id: "static" + version: <%= _version_name %> + vars: address_name: "ipv4-address" instance_name: "vm-instance" properties: @@ -69,6 +72,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "autoscaler_basic" primary_resource_id: "foobar" + version: <%= _version_name %> vars: autoscaler_name: "my-autoscaler" 
instance_template_name: "my-instance-template" @@ -86,6 +90,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "backend_bucket_basic" primary_resource_id: "image_backend" + version: <%= _version_name %> vars: backend_bucket_name: "image-backend-bucket" bucket_name: "image-store-bucket" @@ -109,6 +114,30 @@ overrides: !ruby/object:Provider::ResourceOverrides override_order: -1 name: image diff_suppress_func: 'diskImageDiffSuppress' + diskEncryptionKey.kmsKeyName: !ruby/object:Provider::Terraform::PropertyOverride + diff_suppress_func: 'compareSelfLinkRelativePaths' + name: "kmsKeySelfLink" + description: | + The self link of the encryption key used to encrypt the disk. Also called KmsKeyName + in the cloud console. In order to use this additional + IAM permissions need to be set on the Compute Engine Service Agent. See + https://cloud.google.com/compute/docs/disks/customer-managed-encryption#encrypt_a_new_persistent_disk_with_your_own_keys + sourceSnapshotEncryptionKey.kmsKeyName: !ruby/object:Provider::Terraform::PropertyOverride + diff_suppress_func: 'compareSelfLinkRelativePaths' + name: "kmsKeySelfLink" + description: | + The self link of the encryption key used to encrypt the disk. Also called KmsKeyName + in the cloud console. In order to use this additional + IAM permissions need to be set on the Compute Engine Service Agent. See + https://cloud.google.com/compute/docs/disks/customer-managed-encryption#encrypt_a_new_persistent_disk_with_your_own_keys + sourceImageEncryptionKey.kmsKeyName: !ruby/object:Provider::Terraform::PropertyOverride + diff_suppress_func: 'compareSelfLinkRelativePaths' + name: "kmsKeySelfLink" + description: | + The self link of the encryption key used to encrypt the disk. Also called KmsKeyName + in the cloud console. In order to use this additional + IAM permissions need to be set on the Compute Engine Service Agent. 
See + https://cloud.google.com/compute/docs/disks/customer-managed-encryption#encrypt_a_new_persistent_disk_with_your_own_keys image: !ruby/object:Provider::Terraform::PropertyOverride override_order: 5 description: | @@ -138,6 +167,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "disk_basic" primary_resource_id: "default" + version: <%= _version_name %> vars: disk_name: "test-disk" DiskType: !ruby/object:Provider::Terraform::ResourceOverride @@ -147,6 +177,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "firewall_basic" primary_resource_id: "default" + version: <%= _version_name %> vars: firewall_name: "test-firewall" network_name: "test-network" @@ -211,6 +242,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "forwarding_rule_basic" primary_resource_id: "default" + version: <%= _version_name %> vars: forwarding_rule_name: "website-forwarding-rule" target_pool_name: "website-target-pool" @@ -259,6 +291,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "global_address_basic" primary_resource_id: "default" + version: <%= _version_name %> vars: global_address_name: "global-appserver-ip" properties: @@ -289,6 +322,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "http_health_check_basic" primary_resource_id: "default" + version: <%= _version_name %> vars: http_health_check_name: "authentication-health-check" properties: @@ -319,6 +353,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "https_health_check_basic" primary_resource_id: "default" + version: <%= _version_name %> vars: https_health_check_name: "authentication-health-check" properties: @@ -341,6 +376,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - 
!ruby/object:Provider::Terraform::Examples name: "health_check_basic" primary_resource_id: "internal-health-check" + version: <%= _version_name %> vars: health_check_name: "internal-service-health-check" custom_code: !ruby/object:Provider::Terraform::CustomCode @@ -457,6 +493,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "region_autoscaler_basic" primary_resource_id: "foobar" + version: <%= _version_name %> vars: region_autoscaler_name: "my-region-autoscaler" instance_template_name: "my-instance-template" @@ -488,6 +525,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "region_disk_basic" primary_resource_id: "regiondisk" + version: <%= _version_name %> vars: region_disk_name: "my-region-disk" disk_name: "my-disk" @@ -499,6 +537,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "route_basic" primary_resource_id: "default" + version: <%= _version_name %> vars: route_name: "network-route" network_name: "compute-network" @@ -555,6 +594,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "router_basic" primary_resource_id: "foobar" + version: <%= _version_name %> vars: router_name: "my-router" network_name: "my-network" @@ -579,14 +619,17 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "ssl_certificate_basic" primary_resource_id: "default" + version: <%= _version_name %> ignore_read_extra: - "name_prefix" - !ruby/object:Provider::Terraform::Examples name: "ssl_certificate_random_provider" primary_resource_id: "default" + version: <%= _version_name %> - !ruby/object:Provider::Terraform::Examples name: "ssl_certificate_target_https_proxies" primary_resource_id: "default" + version: <%= _version_name %> vars: target_https_proxy_name: "test-proxy" url_map_name: "url-map" @@ -617,6 
+660,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "ssl_policy_basic" primary_resource_id: "prod-ssl-policy" + version: <%= _version_name %> vars: production_ssl_policy_name: "production-ssl-policy" nonprod_ssl_policy_name: "nonprod-ssl-policy" @@ -694,6 +738,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "subnetwork_basic" primary_resource_id: "network-with-private-secondary-ip-ranges" + version: <%= _version_name %> vars: subnetwork_name: "test-subnetwork" network_name: "test-network" @@ -702,6 +747,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "target_http_proxy_basic" primary_resource_id: "default" + version: <%= _version_name %> vars: target_http_proxy_name: "test-proxy" url_map_name: "url-map" @@ -715,6 +761,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "target_https_proxy_basic" primary_resource_id: "default" + version: <%= _version_name %> vars: target_https_proxy_name: "test-proxy" ssl_certificate_name: "my-certificate" @@ -764,6 +811,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "target_ssl_proxy_basic" primary_resource_id: "default" + version: <%= _version_name %> vars: target_ssl_proxy_name: "test-proxy" ssl_certificate_name: "default-cert" @@ -783,6 +831,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "target_tcp_proxy_basic" primary_resource_id: "default" + version: <%= _version_name %> vars: target_tcp_proxy_name: "test-proxy" backend_service_name: "backend-service" @@ -801,6 +850,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "target_vpn_gateway_basic" primary_resource_id: "target_gateway" + version: <%= _version_name %> vars: 
target_vpn_gateway_name: "vpn1" network_name: "network1" @@ -826,6 +876,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "url_map_basic" primary_resource_id: "urlmap" + version: <%= _version_name %> vars: url_map_name: "urlmap" login_backend_service_name: "login" @@ -866,6 +917,7 @@ overrides: !ruby/object:Provider::ResourceOverrides - !ruby/object:Provider::Terraform::Examples name: "vpn_tunnel_basic" primary_resource_id: "tunnel1" + version: <%= _version_name %> vars: vpn_tunnel_name: "tunnel1" target_vpn_gateway_name: "vpn1" diff --git a/products/containeranalysis/terraform.yaml b/products/containeranalysis/terraform.yaml index 670186456985..c495f062317e 100644 --- a/products/containeranalysis/terraform.yaml +++ b/products/containeranalysis/terraform.yaml @@ -19,11 +19,12 @@ overrides: !ruby/object:Provider::ResourceOverrides custom_code: !ruby/object:Provider::Terraform::CustomCode pre_update: 'templates/terraform/pre_update/containeranalysis_note.erb' example: - - !ruby/object:Provider::Terraform::Examples - name: "container_analysis_note_basic" - primary_resource_id: "note" - vars: - note_name: "test-attestor-note" + - !ruby/object:Provider::Terraform::Examples + name: "container_analysis_note_basic" + primary_resource_id: "note" + version: <%= _version_name %> + vars: + note_name: "test-attestor-note" properties: name: !ruby/object:Provider::Terraform::PropertyOverride custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.erb' diff --git a/products/filestore/terraform.yaml b/products/filestore/terraform.yaml index 80b3801b0d1f..a2c463ac0938 100644 --- a/products/filestore/terraform.yaml +++ b/products/filestore/terraform.yaml @@ -18,11 +18,12 @@ overrides: !ruby/object:Provider::ResourceOverrides id_format: "{{project}}/{{zone}}/{{name}}" import_format: ["projects/{{project}}/locations/{{zone}}/instances/{{name}}"] example: - - !ruby/object:Provider::Terraform::Examples - name: 
"filestore_instance_basic" - primary_resource_id: "instance" - vars: - instance_name: "test-instance" + - !ruby/object:Provider::Terraform::Examples + name: "filestore_instance_basic" + primary_resource_id: "instance" + version: <%= _version_name %> + vars: + instance_name: "test-instance" properties: name: !ruby/object:Provider::Terraform::PropertyOverride custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.erb' diff --git a/products/redis/terraform.yaml b/products/redis/terraform.yaml index f188b9e0e4d0..103fde1cddfc 100644 --- a/products/redis/terraform.yaml +++ b/products/redis/terraform.yaml @@ -20,17 +20,19 @@ overrides: !ruby/object:Provider::ResourceOverrides custom_code: !ruby/object:Provider::Terraform::CustomCode pre_update: 'templates/terraform/pre_update/redis_instance.erb' example: - - !ruby/object:Provider::Terraform::Examples - name: "redis_instance_basic" - primary_resource_id: "cache" - vars: - instance_name: "memory-cache" - - !ruby/object:Provider::Terraform::Examples - name: "redis_instance_full" - primary_resource_id: "cache" - vars: - instance_name: "ha-memory-cache" - network_name: "authorized-network" + - !ruby/object:Provider::Terraform::Examples + name: "redis_instance_basic" + primary_resource_id: "cache" + version: <%= _version_name %> + vars: + instance_name: "memory-cache" + - !ruby/object:Provider::Terraform::Examples + name: "redis_instance_full" + primary_resource_id: "cache" + version: <%= _version_name %> + vars: + instance_name: "ha-memory-cache" + network_name: "authorized-network" properties: alternativeLocationId: !ruby/object:Provider::Terraform::PropertyOverride default_from_api: true diff --git a/products/resourcemanager/terraform.yaml b/products/resourcemanager/terraform.yaml index 39bec11253e5..9433c704822f 100644 --- a/products/resourcemanager/terraform.yaml +++ b/products/resourcemanager/terraform.yaml @@ -20,12 +20,13 @@ overrides: !ruby/object:Provider::ResourceOverrides exclude: false 
import_format: ["{{parent}}/{{name}}"] example: - - !ruby/object:Provider::Terraform::Examples - name: "resource_manager_lien" - skip_test: true - primary_resource_id: "lien" - vars: - project_id: "staging-project" + - !ruby/object:Provider::Terraform::Examples + name: "resource_manager_lien" + skip_test: true + primary_resource_id: "lien" + version: <%= _version_name %> + vars: + project_id: "staging-project" properties: name: !ruby/object:Provider::Terraform::PropertyOverride custom_flatten: templates/terraform/custom_flatten/name_from_self_link.erb diff --git a/products/storage/api.yaml b/products/storage/api.yaml index 1716d9e89202..f36d2db35a8a 100644 --- a/products/storage/api.yaml +++ b/products/storage/api.yaml @@ -322,6 +322,10 @@ objects: kind: 'storage#objectAccessControl' base_url: b/{{bucket}}/o/{{object}}/acl self_link: b/{{bucket}}/o/{{object}}/acl/{{entity}} + references: !ruby/object:Api::Resource::ReferenceLinks + guides: + 'Official Documentation': 'https://cloud.google.com/storage/docs/access-control/create-manage-lists' + api: 'https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls' description: | The ObjectAccessControls resources represent the Access Control Lists (ACLs) for objects within Google Cloud Storage. ACLs let you specify diff --git a/products/storage/object_access_control.yaml b/products/storage/object_access_control.yaml index 55f354baac89..ff695258b807 100644 --- a/products/storage/object_access_control.yaml +++ b/products/storage/object_access_control.yaml @@ -27,25 +27,20 @@ output: true - !ruby/object:Api::Type::String name: 'entity' + required: true description: | The entity holding the permission, in one of the following forms: - user-userId - user-email - group-groupId - group-email - domain-domain - project-team-projectId - allUsers - allAuthenticatedUsers - Examples: - The user liz@example.com would be user-liz@example.com. - The group example@googlegroups.com would be - group-example@googlegroups.com. 
- To refer to all members of the Google Apps for Business domain - example.com, the entity would be domain-example.com. - required: true + * user-{{userId}} + * user-{{email}} (such as "user-liz@example.com") + * group-{{groupId}} + * group-{{email}} (such as "group-example@googlegroups.com") + * domain-{{domain}} (such as "domain-example.com") + * project-team-{{projectId}} + * allUsers + * allAuthenticatedUsers - !ruby/object:Api::Type::String name: 'entityId' + output: true description: 'The ID for the entity' # | 'etag' is not applicable for state convergence. - !ruby/object:Api::Type::Integer @@ -63,6 +58,7 @@ - !ruby/object:Api::Type::NestedObject name: 'projectTeam' description: 'The project team associated with the entity' + output: true properties: - !ruby/object:Api::Type::String name: 'projectNumber' @@ -77,6 +73,7 @@ - !ruby/object:Api::Type::Enum name: 'role' description: 'The access permission for the entity.' + required: true values: - :OWNER - :READER diff --git a/products/storage/terraform.yaml b/products/storage/terraform.yaml new file mode 100644 index 000000000000..1eee3f47f5d6 --- /dev/null +++ b/products/storage/terraform.yaml @@ -0,0 +1,49 @@ +# Copyright 2017 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- !ruby/object:Provider::Terraform::Config +overrides: !ruby/object:Provider::ResourceOverrides + Bucket: !ruby/object:Provider::Terraform::ResourceOverride + exclude: true + BucketAccessControl: !ruby/object:Provider::Terraform::ResourceOverride + exclude: true + ObjectAccessControl: !ruby/object:Provider::Terraform::ResourceOverride + example: + - !ruby/object:Provider::Terraform::Examples + name: "storage_object_access_control_public_object" + primary_resource_id: "public_rule" + skip_test: true + vars: + bucket_name: "static-content-bucket" + object_name: "public-object" + id_format: "{{bucket}}/{{object}}/{{entity}}" + import_format: ["{{bucket}}/{{object}}/{{entity}}"] + properties: + id: !ruby/object:Provider::Terraform::PropertyOverride + exclude: true + bucket: !ruby/object:Provider::Terraform::PropertyOverride + custom_expand: 'templates/terraform/custom_expand/resourceref_as_string.go.erb' + object: !ruby/object:Provider::Terraform::PropertyOverride + description: The name of the object to apply the access control to. + DefaultObjectACL: !ruby/object:Provider::Terraform::ResourceOverride + exclude: true + +# This is for copying files over +files: !ruby/object:Provider::Config::Files + # All of these files will be copied verbatim. + copy: +<%= lines(indent(compile('provider/terraform/common~copy.yaml'), 4)) -%> + # These files have templating (ERB) code that will be run. + # This is usually to add licensing info, autogeneration notices, etc. 
+ compile: +<%= lines(indent(compile('provider/terraform/common~compile.yaml'), 4)) -%> diff --git a/provider/ansible/bundle.rb b/provider/ansible/bundle.rb index e562db487688..2ffc20ab9224 100644 --- a/provider/ansible/bundle.rb +++ b/provider/ansible/bundle.rb @@ -47,7 +47,7 @@ def generate(output_folder, _types, version_name) version = product.version_obj_or_default(version_name) product.set_properties_based_on_version(version) end - compile_files(output_folder) + compile_files(output_folder, version_name) end def products diff --git a/provider/ansible/documentation.rb b/provider/ansible/documentation.rb index 5d1af0bcba21..d43c8750066a 100644 --- a/provider/ansible/documentation.rb +++ b/provider/ansible/documentation.rb @@ -11,168 +11,61 @@ # See the License for the specific language governing permissions and # limitations under the License. -require 'api/object' require 'compile/core' require 'provider/config' require 'provider/core' require 'provider/ansible/manifest' +# Rubocop doesn't like this file because the hashes are complicated. +# Humans like this file because the hashes are explicit and easy to read. +# rubocop:disable Metrics/AbcSize +# rubocop:disable Metrics/CyclomaticComplexity +# rubocop:disable Metrics/PerceivedComplexity module Provider module Ansible # Responsible for building out YAML documentation blocks. - # rubocop:disable Metrics/ModuleLength module Documentation - # Takes a long string and divides each string into multiple paragraphs, - # where each paragraph is a properly indented multi-line bullet point. - # - # Example: - # - This is a paragraph - # that wraps under - # the bullet properly - # - This is the second - # paragraph. 
- def bullet_lines(line, spaces) - line.split(".\n").map { |paragraph| bullet_line(paragraph, spaces) } - end - - # Takes in a string (representing a paragraph) and returns a multi-line - # string, where each line is less than max_length characters long and all - # subsequent lines are indented in by spaces characters - # - # Example: - # - This is a sentence - # that wraps under - # the bullet properly - # - # - | - # This is a sentence - # that wraps under - # the bullet properly - # because of the : - # character - # rubocop:disable Metrics/AbcSize - def bullet_line(paragraph, spaces, _multiline = true, add_period = true) - paragraph += '.' unless paragraph.end_with?('.') || !add_period - paragraph = format_url(paragraph) - paragraph = paragraph.tr("\n", ' ').strip - - # Paragraph placed inside array to get bullet point. - yaml = [paragraph].to_yaml - # YAML documentation header is not necessary. - yaml = yaml.gsub("---\n", '') if yaml.include?("---\n") - - # YAML dumper isn't very smart about line lengths. - # If any line is over 160 characters (with indents), build the YAML - # block using wrap_field. - # Using YAML.dump output ensures that all character escaping done - if yaml.split("\n").any? { |line| line.length > (160 - spaces) } - return wrap_field( - yaml.tr("\n", ' ').gsub(/\s+/, ' '), - spaces + 3 - ).each_with_index.map { |x, i| i.zero? ? x : indent(x, 2) } - end - yaml - end - # rubocop:enable Metrics/AbcSize - - # Builds out a full YAML block for DOCUMENTATION - # This includes the YAML for the property as well as any nested props - def doc_property_yaml(prop, object, spaces) - block = minimal_doc_block(prop, object, spaces) - # Ansible linter does not support nesting options this deep. 
- if prop.is_a?(Api::Type::NestedObject) - block.concat(nested_doc(prop.properties, object, spaces)) - elsif prop.is_a?(Api::Type::Array) && - prop.item_type.is_a?(Api::Type::NestedObject) - block.concat(nested_doc(prop.item_type.properties, object, spaces)) - else - block - end - end - - # Builds out a full YAML block for RETURNS - # This includes the YAML for the property as well as any nested props - def return_property_yaml(prop, spaces) - block = minimal_return_block(prop, spaces) - if prop.is_a? Api::Type::NestedObject - block.concat(nested_return(prop.properties, spaces)) - elsif prop.is_a?(Api::Type::Array) && - prop.item_type.is_a?(Api::Type::NestedObject) - block.concat(nested_return(prop.item_type.properties, spaces)) + def to_yaml(obj) + if obj.is_a?(::Hash) + obj.reject { |_, v| v.nil? }.to_yaml.sub("---\n", '') else - block + obj.to_yaml.sub("---\n", '') end end - private - - # Find URLs and surround with U() - def format_url(paragraph) - paragraph.gsub(%r{ - https?:\/\/(?:www\.|(?!www))[a-zA-Z0-9] - [a-zA-Z0-9-]+[a-zA-Z0-9]\.[^\s]{2,}|www\.[a-zA-Z0-9][a-zA-Z0-9-]+ - [a-zA-Z0-9]\.[^\s]{2,}|https?:\/\/(?:www\.|(?!www)) - [a-zA-Z0-9]\.[^\s]{2,}|www\.[a-zA-Z0-9]\.[^\s]{2,} - }x, 'U(\\0)') - end - - # Returns formatted nested documentation for a set of properties. - def nested_return(properties, spaces) - block = [indent('contains:', 4)] - block.concat( - properties.map do |p| - indent(return_property_yaml(p, spaces + 4), 8) - end - ) - end - - def nested_doc(properties, object, spaces) - block = [indent('suboptions:', 4)] - block.concat( - properties.map do |p| - indent(doc_property_yaml(p, object, spaces + 4), 8) - end - ) - end - - # Builds out the minimal YAML block for DOCUMENTATION - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity - # rubocop:disable Metrics/AbcSize - def minimal_doc_block(prop, _object, spaces) - required = prop.required && !prop.default_value ? 
'true' : 'false' - [ - "#{prop.name.underscore}:", - indent( - [ - 'description:', - # + 8 to compensate for name + description. - indent(bullet_lines(prop.description, spaces + 8), 4), - (indent(bullet_lines(resourceref_description(prop), spaces + 8), 4) \ + # Builds out the DOCUMENTATION for a property. + # This will eventually be converted to YAML + def documentation_for_property(prop) + required = prop.required && !prop.default_value ? true : false + { + prop.name.underscore => { + 'description' => [ + format_description(prop.description), + (resourceref_description(prop) \ if prop.is_a?(Api::Type::ResourceRef) && !prop.resource_ref.readonly) - ].compact, 4 - ), - indent([ - "required: #{required}", - ("default: #{prop.default_value}" if prop.default_value), - ('type: bool' if prop.is_a? Api::Type::Boolean), - ("aliases: [#{prop.aliases.join(', ')}]" if prop.aliases), - ("version_added: #{prop.version_added}" if prop.version_added), - (if prop.is_a? Api::Type::Enum - [ - 'choices:', - "[#{prop.values.map { |x| quote_string(x.to_s) }.join(', ')}]" - ].join(' ') - end) - ].compact, 4) - ] + ].flatten.compact, + 'required' => required, + 'default' => (prop.default_value.to_s if prop.default_value), + 'type' => ('bool' if prop.is_a? Api::Type::Boolean), + 'aliases' => (prop.aliases if prop.aliases), + 'version_added' => (prop.version_added.to_f if prop.version_added), + 'choices' => (prop.values.map(&:to_s) if prop.is_a? Api::Type::Enum), + 'suboptions' => ( + if prop.is_a?(Api::Type::NestedObject) + prop.properties.map { |p| documentation_for_property(p) }.reduce({}, :merge) + elsif prop.is_a?(Api::Type::Array) && prop.item_type.is_a?(Api::Type::NestedObject) + prop.item_type.properties + .map { |p| documentation_for_property(p) } + .reduce({}, :merge) + end + ) + }.reject { |_, v| v.nil? 
} + } end - # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/AbcSize - # rubocop:enable Metrics/PerceivedComplexity - # Builds out the minimal YAML block for RETURNS - def minimal_return_block(prop, spaces) + # Builds out the RETURNS for a property. + # This will eventually be converted to YAML + def returns_for_property(prop) type = python_type(prop) # Complex types only mentioned in reference to RETURNS YAML block # Complex types are nested objects traditionally, but arrays of nested @@ -180,20 +73,20 @@ def minimal_return_block(prop, spaces) type = 'complex' if prop.is_a?(Api::Type::NestedObject) \ || (prop.is_a?(Api::Type::Array) \ && prop.item_type.is_a?(Api::Type::NestedObject)) - [ - "#{prop.name}:", - indent( - [ - 'description:', - # + 8 to compensate for name + description. - indent(bullet_lines(prop.description, spaces + 8), 4) - ], 4 - ), - indent([ - 'returned: success', - "type: #{type}" - ], 4) - ] + { + prop.name => { + 'description' => format_description(prop.description), + 'returned' => 'success', + 'type' => type, + 'contains' => ( + if prop.is_a?(Api::Type::NestedObject) + prop.properties.map { |p| returns_for_property(p) }.reduce({}, :merge) + elsif prop.is_a?(Api::Type::Array) && prop.item_type.is_a?(Api::Type::NestedObject) + prop.item_type.properties.map { |p| returns_for_property(p) }.reduce({}, :merge) + end + ) + }.reject { |_, v| v.nil? } + } end def autogen_notice_contrib @@ -212,7 +105,28 @@ def resourceref_description(prop) "where the value is the #{prop.imports} of your #{prop.resource_ref.name}" ].join(' ') end + + # MM puts descriptions in a text block. Ansible needs it in bullets + def format_description(desc) + desc.split(".\n").map do |paragraph| + paragraph += '.' 
unless paragraph.end_with?('.') + paragraph = format_url(paragraph) + paragraph.tr("\n", ' ').strip.squeeze(' ') + end + end + + # Find URLs and surround with U() + def format_url(paragraph) + paragraph.gsub(%r{ + https?:\/\/(?:www\.|(?!www))[a-zA-Z0-9] + [a-zA-Z0-9-]+[a-zA-Z0-9]\.[^\s]{2,}|www\.[a-zA-Z0-9][a-zA-Z0-9-]+ + [a-zA-Z0-9]\.[^\s]{2,}|https?:\/\/(?:www\.|(?!www)) + [a-zA-Z0-9]\.[^\s]{2,}|www\.[a-zA-Z0-9]\.[^\s]{2,} + }x, 'U(\\0)') + end end - # rubocop:enable Metrics/ModuleLength end end +# rubocop:enable Metrics/AbcSize +# rubocop:enable Metrics/CyclomaticComplexity +# rubocop:enable Metrics/PerceivedComplexity diff --git a/provider/chef/bundle.rb b/provider/chef/bundle.rb index d8b315e22ae5..aed55250879d 100644 --- a/provider/chef/bundle.rb +++ b/provider/chef/bundle.rb @@ -40,7 +40,7 @@ class Manifest < Provider::Chef::Manifest attr_accessor :releases end - def generate(output_folder, _types, _version_name) + def generate(output_folder, _types, version_name) # Let's build all the dependencies off of the products we found on our # path and has the corresponding provider.yaml file @config.manifest.depends.concat( @@ -54,7 +54,7 @@ def generate(output_folder, _types, _version_name) copy_files(output_folder) compile_changelog(output_folder) - compile_files(output_folder) + compile_files(output_folder, version_name) end def products diff --git a/provider/core.rb b/provider/core.rb index 569afb947e65..23dca6958251 100644 --- a/provider/core.rb +++ b/provider/core.rb @@ -65,8 +65,7 @@ def initialize(config, api) # rubocop:disable Metrics/CyclomaticComplexity # rubocop:disable Metrics/PerceivedComplexity def generate(output_folder, types, version_name) - version = @api.version_obj_or_default(version_name) - generate_objects(output_folder, types, version) + generate_objects(output_folder, types, version_name) generate_client_functions(output_folder) unless @config.functions.nil? copy_files(output_folder) \ unless @config.files.nil? || @config.files.copy.nil? 
@@ -78,10 +77,10 @@ def generate(output_folder, types, version_name) # Compilation has to be the last step, as some files (e.g. # CONTRIBUTING.md) may depend on the list of all files previously copied # or compiled. - compile_files(output_folder) \ + compile_files(output_folder, version_name) \ unless @config.files.nil? || @config.files.compile.nil? - generate_datasources(output_folder, types, version) \ + generate_datasources(output_folder, types, version_name) \ unless @config.datasources.nil? apply_file_acls(output_folder) \ unless @config.files.nil? || @config.files.permissions.nil? @@ -102,8 +101,8 @@ def copy_files(output_folder) end end - def compile_files(output_folder) - compile_file_list(output_folder, @config.files.compile) + def compile_files(output_folder, version_name) + compile_file_list(output_folder, @config.files.compile, version: version_name) end def compile_examples(output_folder) @@ -217,7 +216,8 @@ def compile_file_list(output_folder, files, data = {}) # rubocop:disable Metrics/CyclomaticComplexity # rubocop:disable Metrics/PerceivedComplexity # rubocop:disable Metrics/AbcSize - def generate_objects(output_folder, types, version) + def generate_objects(output_folder, types, version_name) + version = @api.version_obj_or_default(version_name) @api.set_properties_based_on_version(version) (@api.objects || []).each do |object| if !types.empty? && !types.include?(object.name) @@ -227,7 +227,11 @@ def generate_objects(output_folder, types, version) elsif types.empty? && object.exclude_if_not_in_version(version) Google::LOGGER.info "Excluding #{object.name} per API version" else - generate_object object, output_folder, version + # version_name will differ from version.name if the resource is being + # generated at its default version instead of the one that was passed + # in to the compiler. Terraform needs to know which version was passed + # in so it can name its output directories correctly. 
+ generate_object object, output_folder, version_name end end end @@ -235,8 +239,8 @@ def generate_objects(output_folder, types, version) # rubocop:enable Metrics/PerceivedComplexity # rubocop:enable Metrics/AbcSize - def generate_object(object, output_folder, version) - data = build_object_data(object, output_folder, version) + def generate_object(object, output_folder, version_name) + data = build_object_data(object, output_folder, version_name) generate_resource data generate_resource_tests data @@ -247,10 +251,11 @@ def generate_object(object, output_folder, version) # rubocop:disable Metrics/AbcSize # rubocop:disable Metrics/CyclomaticComplexity # rubocop:disable Metrics/PerceivedComplexity - def generate_datasources(output_folder, types, version) + def generate_datasources(output_folder, types, version_name) # We need to apply overrides for datasources @config.datasources.validate + version = @api.version_obj_or_default(version_name) @api.set_properties_based_on_version(version) @api.objects.each do |object| if !types.empty? 
&& !types.include?(object.name) @@ -266,7 +271,7 @@ def generate_datasources(output_folder, types, version) "Excluding #{object.name} datasource per API version" ) else - generate_datasource object, output_folder, version + generate_datasource object, output_folder, version_name end end end @@ -274,8 +279,8 @@ def generate_datasources(output_folder, types, version) # rubocop:enable Metrics/PerceivedComplexity # rubocop:enable Metrics/AbcSize - def generate_datasource(object, output_folder, version) - data = build_object_data(object, output_folder, version) + def generate_datasource(object, output_folder, version_name) + data = build_object_data(object, output_folder, version_name) compile_datasource data end diff --git a/provider/inspec.rb b/provider/inspec.rb index 69439d7b94de..a904cadcef75 100644 --- a/provider/inspec.rb +++ b/provider/inspec.rb @@ -16,10 +16,12 @@ require 'provider/inspec/manifest' require 'provider/inspec/resource_override' require 'provider/inspec/property_override' +require 'active_support/inflector' module Provider # Code generator for Example Cookbooks that manage Google Cloud Platform # resources. 
+ # rubocop:disable Metrics/ClassLength class Inspec < Provider::Core include Google::RubyUtils # Settings for the provider @@ -50,14 +52,24 @@ def generate_resource(data) ) generate_resource_file data.clone.merge( default_template: 'templates/inspec/plural_resource.erb', - out_file: File.join(target_folder, "google_#{data[:product_name]}_#{name}s.rb") + out_file: \ + File.join(target_folder, "google_#{data[:product_name]}_#{name}".pluralize + '.rb') ) + generate_documentation(data) end - # Returns the url that this object can be retrieved from - # based off of the self link - def url(object) - url = object.self_link_url[1] + # Generates InSpec markdown documents for the resource + def generate_documentation(data) + name = data[:object].name.underscore + docs_folder = File.join(data[:output_folder], 'docs', 'resources') + generate_resource_file data.clone.merge( + default_template: 'templates/inspec/doc-template.md.erb', + out_file: File.join(docs_folder, "google_#{data[:product_name]}_#{name}.md") + ) + end + + # Format a url that may be include newlines into a single line + def format_url(url) return url.join('') if url.is_a?(Array) url.split("\n").join('') end @@ -73,8 +85,110 @@ def generate_typed_array(data, prop) end def emit_resourceref_object(data) end - def emit_nested_object(data) end - def generate_network_datas(data, object) end + + def emit_nested_object(data) + target = if data[:emit_array] + data[:property].item_type.property_file + else + data[:property].property_file + end + { + source: File.join('templates', 'inspec', 'nested_object.erb'), + target: "libraries/#{target}.rb", + overrides: emit_nested_object_overrides(data) + } + end + + def emit_nested_object_overrides(data) + data.clone.merge( + api_name: data[:api_name].camelize(:upper), + object_type: data[:obj_name].camelize(:upper), + product_ns: data[:product_name].camelize(:upper), + class_name: if data[:emit_array] + data[:property].item_type.property_class.last + else + 
data[:property].property_class.last + end + ) + end + + def time?(property) + property.is_a?(::Api::Type::Time) + end + + # Figuring out if a property is a primitive ruby type is a hassle. But it is important + # Fingerprints are strings, NameValues are hashes, and arrays of primitives are arrays + # Arrays of NestedObjects need to have their contents parsed and returned in an array + # ResourceRefs are strings + def primitive?(property) + array_primitive = (property.is_a?(Api::Type::Array)\ + && !property.item_type.is_a?(::Api::Type::NestedObject)) + property.is_a?(::Api::Type::Primitive)\ + || array_primitive\ + || property.is_a?(::Api::Type::NameValues)\ + || property.is_a?(::Api::Type::Fingerprint)\ + || property.is_a?(::Api::Type::ResourceRef) + end + + # Arrays of nested objects need special requires statements + def typed_array?(property) + property.is_a?(::Api::Type::Array) && nested_object?(property.item_type) + end + + def nested_object?(property) + property.is_a?(::Api::Type::NestedObject) + end + + # Only arrays of nested objects and nested object properties need require statements + # for InSpec. Primitives are all handled natively + def generate_requires(properties) + nested_props = properties.select { |type| nested_object?(type) } + nested_object_arrays = properties.select\ + { |type| typed_array?(type) && nested_object?(type.item_type) } + nested_array_requires = nested_object_arrays.collect { |type| array_requires(type) } + # Need to include requires statements for the requirements of a nested object + # TODO is this needed? 
Not sure how ruby works so well + nested_prop_requires = nested_props.map\ + { |nested_prop| generate_requires(nested_prop.properties) } + nested_object_requires = nested_props.map\ + { |nested_object| nested_object_requires(nested_object) } + nested_object_requires + nested_prop_requires + nested_array_requires + end + + def array_requires(type) + File.join( + 'google', + type.__resource.__product.prefix[1..-1], + 'property', + [type.__resource.name.downcase, type.item_type.name.underscore].join('_') + ) + end + + def nested_object_requires(nested_object_type) + File.join( + 'google', + nested_object_type.__resource.__product.prefix[1..-1], + 'property', + [nested_object_type.__resource.name, nested_object_type.name.underscore].join('_') + ).downcase + end + + def resource_name(object, product_ns) + "google_#{product_ns.downcase}_#{object.name.underscore}" + end + + def sub_property_descriptions(property) + if nested_object?(property) + return property.properties.map \ + { |prop| " * `#{prop.name}`: #{prop.description}" }.join("\n") + end + # rubocop:disable Style/GuardClause + if typed_array?(property) + return property.item_type.properties.map \ + { |prop| " * `#{prop.name}`: #{prop.description}" }.join("\n") + end + end + # rubocop:enable Style/GuardClause end end diff --git a/provider/puppet/bundle.rb b/provider/puppet/bundle.rb index c281dfcefb19..76df015da70a 100644 --- a/provider/puppet/bundle.rb +++ b/provider/puppet/bundle.rb @@ -35,7 +35,7 @@ def provider end end - def generate(output_folder, _types, _version_name) + def generate(output_folder, _types, version_name) # Let's build all the dependencies off of the products we found on our # path and has the corresponding provider.yaml file @config.manifest.releases = releases @@ -48,7 +48,7 @@ def generate(output_folder, _types, _version_name) compile_changelog(output_folder) copy_files(output_folder) - compile_files(output_folder) + compile_files(output_folder, version_name) end def products diff --git 
a/provider/terraform.rb b/provider/terraform.rb index e761d4a646d9..da1ed37bd54d 100644 --- a/provider/terraform.rb +++ b/provider/terraform.rb @@ -117,7 +117,8 @@ def nested_properties(property) # per resource. The resource.erb template forms the basis of a single # GCP Resource on Terraform. def generate_resource(data) - target_folder = File.join(data[:output_folder], 'google') + dir = data[:version] == 'beta' ? 'google-beta' : 'google' + target_folder = File.join(data[:output_folder], dir) FileUtils.mkpath target_folder name = data[:object].name.underscore product_name = data[:product_name].underscore @@ -150,7 +151,8 @@ def generate_documentation(data) def generate_resource_tests(data) return if data[:object].example.reject(&:skip_test).empty? - target_folder = File.join(data[:output_folder], 'google') + dir = data[:version] == 'beta' ? 'google-beta' : 'google' + target_folder = File.join(data[:output_folder], dir) FileUtils.mkpath target_folder name = data[:object].name.underscore product_name = data[:product_name].underscore diff --git a/provider/terraform/common~compile.yaml b/provider/terraform/common~compile.yaml index 63085cbb98a7..56dd7748503f 100644 --- a/provider/terraform/common~compile.yaml +++ b/provider/terraform/common~compile.yaml @@ -14,3 +14,27 @@ # the final module tree structure: <% dir = _version_name == 'beta' ? 
'google-beta' : 'google' -%> '<%= dir -%>/provider_<%= api.prefix[1..-1] -%>_gen.go': 'templates/terraform/provider_gen.erb' + +<% Dir["provider/terraform/tests/*.go.erb"].each do |file_path| + fname = file_path.split('/')[-1] +-%> +'<%= dir -%>/<%= fname.split(".erb")[0] -%>': 'provider/terraform/tests/<%= fname -%>' +<% end -%> +<% + Dir["provider/terraform/resources/*.go.erb"].each do |file_path| + fname = file_path.split('/')[-1] +-%> +'<%= dir -%>/<%= fname.split(".erb")[0] -%>': 'provider/terraform/resources/<%= fname -%>' +<% end -%> +<% + Dir["provider/terraform/data_sources/*.go.erb"].each do |file_path| + fname = file_path.split('/')[-1] +-%> +'<%= dir -%>/<%= fname.split(".erb")[0] -%>': 'provider/terraform/data_sources/<%= fname -%>' +<% end -%> +<% + Dir["provider/terraform/utils/*.go.erb"].each do |file_path| + fname = file_path.split('/')[-1] +-%> +'<%= dir -%>/<%= fname.split(".erb")[0] -%>': 'provider/terraform/utils/<%= fname -%>' +<% end -%> diff --git a/provider/terraform/custom_code.rb b/provider/terraform/custom_code.rb index 3a77e7ae8342..fa770ecf86ab 100644 --- a/provider/terraform/custom_code.rb +++ b/provider/terraform/custom_code.rb @@ -68,6 +68,9 @@ class Examples < Api::Object # vars is a Hash from template variable names to output variable names attr_reader :vars + # the version (ga, beta, etc.) this example is being generated at + attr_reader :version + # Extra properties to ignore read on during import. # These properties will likely be custom code. 
attr_reader :ignore_read_extra @@ -79,7 +82,8 @@ def config_documentation body = lines(compile_file( { vars: vars, - primary_resource_id: primary_resource_id + primary_resource_id: primary_resource_id, + version: version }, "templates/terraform/examples/#{name}.tf.erb" )) @@ -94,7 +98,8 @@ def config_test body = lines(compile_file( { vars: vars.map { |k, str| [k, "#{str}-%s"] }.to_h, - primary_resource_id: primary_resource_id + primary_resource_id: primary_resource_id, + version: version }, "templates/terraform/examples/#{name}.tf.erb" )) @@ -115,7 +120,8 @@ def config_example body = lines(compile_file( { vars: vars.map { |k, str| [k, "#{str}-${local.name_suffix}"] }.to_h, - primary_resource_id: primary_resource_id + primary_resource_id: primary_resource_id, + version: version }, "templates/terraform/examples/#{name}.tf.erb" )) @@ -123,6 +129,22 @@ def config_example substitute_example_paths body end + def oics_link + hash = { + cloudshell_git_repo: 'https://github.com/terraform-google-modules/docs-examples.git', + cloudshell_working_dir: @name, + cloudshell_image: 'gcr.io/graphite-cloud-shell-images/terraform:latest', + open_in_editor: 'main.tf', + cloudshell_print: './motd', + cloudshell_tutorial: './tutorial.md' + } + URI::HTTPS.build( + host: 'console.cloud.google.com', + path: '/cloudshell/open', + query: URI.encode_www_form(hash) + ) + end + def substitute_test_paths(config) config = config.gsub('path/to/private.key', 'test-fixtures/ssl_cert/test.key') config.gsub('path/to/certificate.crt', 'test-fixtures/ssl_cert/test.crt') diff --git a/provider/terraform/data_sources/data_source_google_container_engine_versions.go b/provider/terraform/data_sources/data_source_google_container_engine_versions.go.erb similarity index 96% rename from provider/terraform/data_sources/data_source_google_container_engine_versions.go rename to provider/terraform/data_sources/data_source_google_container_engine_versions.go.erb index 5e6e420195fd..14063b548091 100644 --- 
a/provider/terraform/data_sources/data_source_google_container_engine_versions.go +++ b/provider/terraform/data_sources/data_source_google_container_engine_versions.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -20,7 +21,9 @@ func dataSourceGoogleContainerEngineVersions() *schema.Resource { Optional: true, }, "region": { +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeString, Optional: true, ConflictsWith: []string{"zone"}, diff --git a/provider/terraform/resources/resource_bigtable_instance.go b/provider/terraform/resources/resource_bigtable_instance.go index d910d6d8663a..30c303dafddd 100644 --- a/provider/terraform/resources/resource_bigtable_instance.go +++ b/provider/terraform/resources/resource_bigtable_instance.go @@ -5,7 +5,6 @@ import ( "fmt" "log" - "github.com/hashicorp/terraform/helper/customdiff" "github.com/hashicorp/terraform/helper/schema" "github.com/hashicorp/terraform/helper/validation" @@ -16,13 +15,7 @@ func resourceBigtableInstance() *schema.Resource { return &schema.Resource{ Create: resourceBigtableInstanceCreate, Read: resourceBigtableInstanceRead, - // TODO: Update is only needed because we're doing forcenew in customizediff - // when we're done with the deprecation, we can drop customizediff and make cluster forcenew - Update: schema.Noop, Delete: resourceBigtableInstanceDestroy, - CustomizeDiff: customdiff.All( - resourceBigTableInstanceClusterCustomizeDiff, - ), Schema: map[string]*schema.Schema{ "name": { @@ -31,28 +24,20 @@ func resourceBigtableInstance() *schema.Resource { ForceNew: true, }, - "cluster_id": { - Type: schema.TypeString, - Optional: true, - Deprecated: "Use cluster instead.", - ConflictsWith: []string{"cluster"}, - }, - "cluster": { - Type: schema.TypeSet, 
- Optional: true, - MaxItems: 1, - ConflictsWith: []string{"cluster_id", "zone", "num_nodes", "storage_type"}, + Type: schema.TypeSet, + Required: true, + ForceNew: true, + MaxItems: 2, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "cluster_id": { Type: schema.TypeString, - Optional: true, + Required: true, }, "zone": { Type: schema.TypeString, - Optional: true, - Computed: true, + Required: true, }, "num_nodes": { Type: schema.TypeInt, @@ -68,15 +53,6 @@ func resourceBigtableInstance() *schema.Resource { }, }, - "zone": { - Type: schema.TypeString, - Optional: true, - Computed: true, - ForceNew: true, - Deprecated: "Use cluster instead.", - ConflictsWith: []string{"cluster"}, - }, - "display_name": { Type: schema.TypeString, Optional: true, @@ -84,13 +60,6 @@ func resourceBigtableInstance() *schema.Resource { Computed: true, }, - "num_nodes": { - Type: schema.TypeInt, - Optional: true, - Deprecated: "Use cluster instead.", - ConflictsWith: []string{"cluster"}, - }, - "instance_type": { Type: schema.TypeString, Optional: true, @@ -99,67 +68,42 @@ func resourceBigtableInstance() *schema.Resource { ValidateFunc: validation.StringInSlice([]string{"DEVELOPMENT", "PRODUCTION"}, false), }, - "storage_type": { - Type: schema.TypeString, - Optional: true, - Default: "SSD", - ValidateFunc: validation.StringInSlice([]string{"SSD", "HDD"}, false), - Deprecated: "Use cluster instead.", - ConflictsWith: []string{"cluster"}, - }, - "project": { Type: schema.TypeString, Optional: true, Computed: true, ForceNew: true, }, - }, - } -} -func resourceBigTableInstanceClusterCustomizeDiff(d *schema.ResourceDiff, meta interface{}) error { - if d.Get("cluster_id").(string) == "" && d.Get("cluster.#").(int) == 0 { - return fmt.Errorf("At least one cluster must be set.") - } - if !d.HasChange("cluster_id") && !d.HasChange("zone") && !d.HasChange("num_nodes") && - !d.HasChange("storage_type") && !d.HasChange("cluster") { - return nil - } - if d.Get("cluster.#").(int) == 1 { - // 
if we have exactly one cluster, and it has the same values as the old top-level - // values, we can assume the user is trying to go from the deprecated values to the - // new values, and we shouldn't ForceNew. We know that the top-level values aren't - // set, because they ConflictWith cluster. - oldID, _ := d.GetChange("cluster_id") - oldNodes, _ := d.GetChange("num_nodes") - oldZone, _ := d.GetChange("zone") - oldStorageType, _ := d.GetChange("storage_type") - new := d.Get("cluster").(*schema.Set).List()[0].(map[string]interface{}) - - if oldID.(string) == new["cluster_id"].(string) && - oldNodes.(int) == new["num_nodes"].(int) && - oldZone.(string) == new["zone"].(string) && - oldStorageType.(string) == new["storage_type"].(string) { - return nil - } - } - if d.HasChange("cluster_id") { - d.ForceNew("cluster_id") - } - if d.HasChange("cluster") { - d.ForceNew("cluster") - } - if d.HasChange("zone") { - d.ForceNew("zone") - } - if d.HasChange("num_nodes") { - d.ForceNew("num_nodes") - } - if d.HasChange("storage_type") { - d.ForceNew("storage_type") + "cluster_id": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Removed: "Use cluster instead.", + }, + + "zone": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Removed: "Use cluster instead.", + }, + + "num_nodes": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + Removed: "Use cluster instead.", + }, + + "storage_type": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Removed: "Use cluster instead.", + }, + }, } - return nil } func resourceBigtableInstanceCreate(d *schema.ResourceData, meta interface{}) error { @@ -188,31 +132,9 @@ func resourceBigtableInstanceCreate(d *schema.ResourceData, meta interface{}) er conf.InstanceType = bigtable.PRODUCTION } - if d.Get("cluster.#").(int) > 0 { - // expand cluster - conf.Clusters = expandBigtableClusters(d.Get("cluster").(*schema.Set).List(), conf.InstanceID, config.Zone) - if err != nil { - return 
fmt.Errorf("error expanding clusters: %s", err.Error()) - } - } else { - // TODO: remove this when we're done with the deprecation period - zone, err := getZone(d, config) - if err != nil { - return err - } - cluster := bigtable.ClusterConfig{ - InstanceID: conf.InstanceID, - NumNodes: int32(d.Get("num_nodes").(int)), - Zone: zone, - ClusterID: d.Get("cluster_id").(string), - } - switch d.Get("storage_type").(string) { - case "HDD": - cluster.StorageType = bigtable.HDD - case "SSD": - cluster.StorageType = bigtable.SSD - } - conf.Clusters = append(conf.Clusters, cluster) + conf.Clusters = expandBigtableClusters(d.Get("cluster").(*schema.Set).List(), conf.InstanceID) + if err != nil { + return fmt.Errorf("error expanding clusters: %s", err.Error()) } c, err := config.bigtableClientFactory.NewInstanceAdminClient(project) @@ -256,37 +178,27 @@ func resourceBigtableInstanceRead(d *schema.ResourceData, meta interface{}) erro } d.Set("project", project) - if d.Get("cluster.#").(int) > 0 { - clusters := d.Get("cluster").(*schema.Set).List() - clusterState := []map[string]interface{}{} - for _, cl := range clusters { - cluster := cl.(map[string]interface{}) - clus, err := c.GetCluster(ctx, instance.Name, cluster["cluster_id"].(string)) - if err != nil { - if isGoogleApiErrorWithCode(err, 404) { - log.Printf("[WARN] Cluster %q not found, not setting it in state", cluster["cluster_id"].(string)) - continue - } - return fmt.Errorf("Error retrieving cluster %q: %s", cluster["cluster_id"].(string), err.Error()) - } - clusterState = append(clusterState, flattenBigtableCluster(clus, cluster["storage_type"].(string))) - } - err = d.Set("cluster", clusterState) - if err != nil { - return fmt.Errorf("Error setting clusters in state: %s", err.Error()) - } - d.Set("cluster_id", "") - d.Set("zone", "") - d.Set("num_nodes", 0) - d.Set("storage_type", "SSD") - } else { - // TODO remove this when we're done with our deprecation period - zone, err := getZone(d, config) + + clusters := 
d.Get("cluster").(*schema.Set).List() + clusterState := []map[string]interface{}{} + for _, cl := range clusters { + cluster := cl.(map[string]interface{}) + clus, err := c.GetCluster(ctx, instance.Name, cluster["cluster_id"].(string)) if err != nil { - return err + if isGoogleApiErrorWithCode(err, 404) { + log.Printf("[WARN] Cluster %q not found, not setting it in state", cluster["cluster_id"].(string)) + continue + } + return fmt.Errorf("Error retrieving cluster %q: %s", cluster["cluster_id"].(string), err.Error()) } - d.Set("zone", zone) + clusterState = append(clusterState, flattenBigtableCluster(clus, cluster["storage_type"].(string))) + } + + err = d.Set("cluster", clusterState) + if err != nil { + return fmt.Errorf("Error setting clusters in state: %s", err.Error()) } + d.Set("name", instance.Name) d.Set("display_name", instance.DisplayName) @@ -329,14 +241,11 @@ func flattenBigtableCluster(c *bigtable.ClusterInfo, storageType string) map[str } } -func expandBigtableClusters(clusters []interface{}, instanceID string, defaultZone string) []bigtable.ClusterConfig { +func expandBigtableClusters(clusters []interface{}, instanceID string) []bigtable.ClusterConfig { results := make([]bigtable.ClusterConfig, 0, len(clusters)) for _, c := range clusters { cluster := c.(map[string]interface{}) - zone := defaultZone - if confZone, ok := cluster["zone"]; ok { - zone = confZone.(string) - } + zone := cluster["zone"].(string) var storageType bigtable.StorageType switch cluster["storage_type"].(string) { case "SSD": diff --git a/provider/terraform/resources/resource_cloudiot_registry.go b/provider/terraform/resources/resource_cloudiot_registry.go index a7aa0d066bf4..ae850fcb5204 100644 --- a/provider/terraform/resources/resource_cloudiot_registry.go +++ b/provider/terraform/resources/resource_cloudiot_registry.go @@ -76,6 +76,7 @@ func resourceCloudIoTRegistry() *schema.Resource { }, "mqtt_config": &schema.Schema{ Type: schema.TypeMap, + Computed: true, Optional: true, 
Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -90,6 +91,7 @@ func resourceCloudIoTRegistry() *schema.Resource { }, "http_config": &schema.Schema{ Type: schema.TypeMap, + Computed: true, Optional: true, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -233,6 +235,11 @@ func resourceCloudIoTRegistryCreate(d *schema.ResourceData, meta interface{}) er d.SetId("") return err } + + // If we infer project and region, they are never actually set so we set them here + d.Set("project", project) + d.Set("region", region) + return resourceCloudIoTRegistryRead(d, meta) } @@ -317,19 +324,9 @@ func resourceCloudIoTRegistryRead(d *schema.ResourceData, meta interface{}) erro } else { d.Set("state_notification_config", nil) } - // If no config exist for mqtt or http config default values are omitted. - mqttState := res.MqttConfig.MqttEnabledState - _, hasMqttConfig := d.GetOk("mqtt_config") - if mqttState != mqttEnabled || hasMqttConfig { - d.Set("mqtt_config", - map[string]string{"mqtt_enabled_state": mqttState}) - } - httpState := res.HttpConfig.HttpEnabledState - _, hasHttpConfig := d.GetOk("http_config") - if httpState != httpEnabled || hasHttpConfig { - d.Set("http_config", - map[string]string{"http_enabled_state": httpState}) - } + + d.Set("mqtt_config", map[string]string{"mqtt_enabled_state": res.MqttConfig.MqttEnabledState}) + d.Set("http_config", map[string]string{"http_enabled_state": res.HttpConfig.HttpEnabledState}) credentials := make([]map[string]interface{}, len(res.Credentials)) for i, item := range res.Credentials { diff --git a/provider/terraform/resources/resource_composer_environment.go b/provider/terraform/resources/resource_composer_environment.go index dc220fbe5bc5..c9736ffbfa43 100644 --- a/provider/terraform/resources/resource_composer_environment.go +++ b/provider/terraform/resources/resource_composer_environment.go @@ -677,10 +677,34 @@ func expandComposerEnvironmentZone(v interface{}, d *schema.ResourceData, config return 
getRelativePath(zone) } -func expandComposerEnvironmentMachineType(v interface{}, d *schema.ResourceData, config *Config, nodeCfgZone interface{}) (string, error) { +func expandComposerEnvironmentMachineType(v interface{}, d *schema.ResourceData, config *Config, nodeCfgZone string) (string, error) { + machineType := v.(string) + requiredZone := GetResourceNameFromSelfLink(nodeCfgZone) + fv, err := ParseMachineTypesFieldValue(v.(string), d, config) if err != nil { - return "", nil + if requiredZone == "" { + return "", err + } + + // Try to construct machine type with zone/project given in config. + project, err := getProject(d, config) + if err != nil { + return "", err + } + + fv = &ZonalFieldValue{ + Project: project, + Zone: requiredZone, + Name: GetResourceNameFromSelfLink(machineType), + resourceType: "machineTypes", + } + } + + // Make sure zone in node_config.machineType matches node_config.zone if + // given. + if requiredZone != "" && fv.Zone != requiredZone { + return "", fmt.Errorf("node_config machine_type %q must be in node_config zone %q", machineType, requiredZone) } return fv.RelativeLink(), nil } diff --git a/provider/terraform/resources/resource_compute_backend_service.go b/provider/terraform/resources/resource_compute_backend_service.go.erb similarity index 99% rename from provider/terraform/resources/resource_compute_backend_service.go rename to provider/terraform/resources/resource_compute_backend_service.go.erb index fd747ed57e58..864f1a1f75a5 100644 --- a/provider/terraform/resources/resource_compute_backend_service.go +++ b/provider/terraform/resources/resource_compute_backend_service.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -159,7 +160,9 @@ func resourceComputeBackendService() *schema.Resource { }, "custom_request_headers": &schema.Schema{ +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. 
See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeSet, Optional: true, Elem: &schema.Schema{Type: schema.TypeString}, diff --git a/provider/terraform/resources/resource_compute_global_forwarding_rule.go b/provider/terraform/resources/resource_compute_global_forwarding_rule.go.erb similarity index 99% rename from provider/terraform/resources/resource_compute_global_forwarding_rule.go rename to provider/terraform/resources/resource_compute_global_forwarding_rule.go.erb index a775861d2484..b842067dfddd 100644 --- a/provider/terraform/resources/resource_compute_global_forwarding_rule.go +++ b/provider/terraform/resources/resource_compute_global_forwarding_rule.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -56,7 +57,9 @@ func resourceComputeGlobalForwardingRule() *schema.Resource { }, "labels": &schema.Schema{ +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. 
See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeMap, Optional: true, Elem: &schema.Schema{Type: schema.TypeString}, diff --git a/provider/terraform/resources/resource_compute_instance_group_manager.go b/provider/terraform/resources/resource_compute_instance_group_manager.go.erb similarity index 80% rename from provider/terraform/resources/resource_compute_instance_group_manager.go rename to provider/terraform/resources/resource_compute_instance_group_manager.go.erb index 584316b56d9c..e52fae538a57 100644 --- a/provider/terraform/resources/resource_compute_instance_group_manager.go +++ b/provider/terraform/resources/resource_compute_instance_group_manager.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -37,17 +38,25 @@ func resourceComputeInstanceGroupManager() *schema.Resource { ForceNew: true, }, +<% if version.nil? || version == 'ga' -%> "instance_template": &schema.Schema{ Type: schema.TypeString, Optional: true, DiffSuppressFunc: compareSelfLinkRelativePaths, }, +<% end -%> "version": &schema.Schema{ +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeList, +<% if version.nil? || version == 'ga' -%> Optional: true, Computed: true, +<% else -%> + Required: true, +<% end -%> Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "name": &schema.Schema{ @@ -114,7 +123,11 @@ func resourceComputeInstanceGroupManager() *schema.Resource { }, "named_port": &schema.Schema{ +<% if version.nil? 
|| version == 'ga' -%> Type: schema.TypeList, +<% else -%> + Type: schema.TypeSet, +<% end -%> Optional: true, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -143,6 +156,7 @@ func resourceComputeInstanceGroupManager() *schema.Resource { Computed: true, }, +<% if version.nil? || version == 'ga' -%> "update_strategy": &schema.Schema{ Type: schema.TypeString, Optional: true, @@ -158,6 +172,7 @@ func resourceComputeInstanceGroupManager() *schema.Resource { return false }, }, +<% end -%> "target_pools": &schema.Schema{ Type: schema.TypeSet, @@ -175,7 +190,9 @@ func resourceComputeInstanceGroupManager() *schema.Resource { }, "auto_healing_policies": &schema.Schema{ +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeList, Optional: true, MaxItems: 1, @@ -196,8 +213,12 @@ func resourceComputeInstanceGroupManager() *schema.Resource { }, }, +<% if version.nil? || version == 'ga' -%> "rolling_update_policy": &schema.Schema{ Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% else -%> + "update_policy": &schema.Schema{ +<% end -%> Type: schema.TypeList, Optional: true, MaxItems: 1, @@ -218,28 +239,46 @@ func resourceComputeInstanceGroupManager() *schema.Resource { "max_surge_fixed": &schema.Schema{ Type: schema.TypeInt, Optional: true, +<% if version.nil? || version == 'ga' -%> Default: 1, ConflictsWith: []string{"rolling_update_policy.0.max_surge_percent"}, +<% else -%> + Computed: true, + ConflictsWith: []string{"update_policy.0.max_surge_percent"}, +<% end -%> }, "max_surge_percent": &schema.Schema{ Type: schema.TypeInt, Optional: true, +<% if version.nil? 
|| version == 'ga' -%> ConflictsWith: []string{"rolling_update_policy.0.max_surge_fixed"}, +<% else -%> + ConflictsWith: []string{"update_policy.0.max_surge_fixed"}, +<% end -%> ValidateFunc: validation.IntBetween(0, 100), }, "max_unavailable_fixed": &schema.Schema{ Type: schema.TypeInt, Optional: true, +<% if version.nil? || version == 'ga' -%> Default: 1, ConflictsWith: []string{"rolling_update_policy.0.max_unavailable_percent"}, +<% else -%> + Computed: true, + ConflictsWith: []string{"update_policy.0.max_unavailable_percent"}, +<% end -%> }, "max_unavailable_percent": &schema.Schema{ Type: schema.TypeInt, Optional: true, +<% if version.nil? || version == 'ga' -%> ConflictsWith: []string{"rolling_update_policy.0.max_unavailable_fixed"}, +<% else -%> + ConflictsWith: []string{"update_policy.0.max_unavailable_fixed"}, +<% end -%> ValidateFunc: validation.IntBetween(0, 100), }, @@ -300,21 +339,32 @@ func resourceComputeInstanceGroupManagerCreate(d *schema.ResourceData, meta inte return err } +<% if version.nil? || version == 'ga' -%> if _, ok := d.GetOk("rolling_update_policy"); d.Get("update_strategy") == "ROLLING_UPDATE" && !ok { return fmt.Errorf("[rolling_update_policy] must be set when 'update_strategy' is set to 'ROLLING_UPDATE'") } +<% end -%> // Build the parameter manager := &computeBeta.InstanceGroupManager{ Name: d.Get("name").(string), Description: d.Get("description").(string), BaseInstanceName: d.Get("base_instance_name").(string), +<% if version.nil? || version == 'ga' -%> InstanceTemplate: d.Get("instance_template").(string), +<% end -%> TargetSize: int64(d.Get("target_size").(int)), +<% if version.nil? 
|| version == 'ga' -%> NamedPorts: getNamedPortsBeta(d.Get("named_port").([]interface{})), +<% else -%> + NamedPorts: getNamedPortsBeta(d.Get("named_port").(*schema.Set).List()), +<% end -%> TargetPools: convertStringSet(d.Get("target_pools").(*schema.Set)), AutoHealingPolicies: expandAutoHealingPolicies(d.Get("auto_healing_policies").([]interface{})), Versions: expandVersions(d.Get("version").([]interface{})), +<% unless version.nil? || version == 'ga' -%> + UpdatePolicy: expandUpdatePolicy(d.Get("update_policy").([]interface{})), +<% end -%> // Force send TargetSize to allow a value of 0. ForceSendFields: []string{"TargetSize"}, } @@ -438,12 +488,19 @@ func resourceComputeInstanceGroupManagerRead(d *schema.ResourceData, meta interf } manager, err := getManager(d, meta) - if err != nil || manager == nil { + if err != nil { return err } + if manager == nil { + log.Printf("[WARN] Instance Group Manager %q not found, removing from state.", d.Id()) + d.SetId("") + return nil + } d.Set("base_instance_name", manager.BaseInstanceName) +<% if version.nil? 
|| version == 'ga' -%> d.Set("instance_template", ConvertSelfLinkToV1(manager.InstanceTemplate)) +<% end -%> if err := d.Set("version", flattenVersions(manager.Versions)); err != nil { return err } @@ -452,17 +509,30 @@ func resourceComputeInstanceGroupManagerRead(d *schema.ResourceData, meta interf d.Set("description", manager.Description) d.Set("project", project) d.Set("target_size", manager.TargetSize) - d.Set("target_pools", manager.TargetPools) - d.Set("named_port", flattenNamedPortsBeta(manager.NamedPorts)) + if err = d.Set("target_pools", manager.TargetPools); err != nil { + return fmt.Errorf("Error setting target_pools in state: %s", err.Error()) + } + if err = d.Set("named_port", flattenNamedPortsBeta(manager.NamedPorts)); err != nil { + return fmt.Errorf("Error setting named_port in state: %s", err.Error()) + } d.Set("fingerprint", manager.Fingerprint) d.Set("instance_group", ConvertSelfLinkToV1(manager.InstanceGroup)) d.Set("self_link", ConvertSelfLinkToV1(manager.SelfLink)) + +<% if version.nil? || version == 'ga' -%> update_strategy, ok := d.GetOk("update_strategy") if !ok { update_strategy = "REPLACE" } d.Set("update_strategy", update_strategy.(string)) - d.Set("auto_healing_policies", flattenAutoHealingPolicies(manager.AutoHealingPolicies)) +<% else -%> + if err = d.Set("update_policy", flattenUpdatePolicy(manager.UpdatePolicy)); err != nil { + return fmt.Errorf("Error setting update_policy in state: %s", err.Error()) + } +<% end -%> + if err = d.Set("auto_healing_policies", flattenAutoHealingPolicies(manager.AutoHealingPolicies)); err != nil { + return fmt.Errorf("Error setting auto_healing_policies in state: %s", err.Error()) + } if d.Get("wait_for_instances").(bool) { conf := resource.StateChangeConf{ @@ -480,6 +550,8 @@ func resourceComputeInstanceGroupManagerRead(d *schema.ResourceData, meta interf return nil } + +<% if version.nil? 
|| version == 'ga' -%> // Updates an instance group manager by applying the update strategy (REPLACE, RESTART) // and rolling update policy (PROACTIVE, OPPORTUNISTIC). Updates performed by API // are OPPORTUNISTIC by default. @@ -698,6 +770,103 @@ func resourceComputeInstanceGroupManagerUpdate(d *schema.ResourceData, meta inte return resourceComputeInstanceGroupManagerRead(d, meta) } +<% else -%> +func resourceComputeInstanceGroupManagerUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + project, err := getProject(d, config) + if err != nil { + return err + } + + zone, err := getZone(d, config) + if err != nil { + return err + } + + updatedManager := &computeBeta.InstanceGroupManager{ + Fingerprint: d.Get("fingerprint").(string), + } + var change bool + + if d.HasChange("target_pools") { + updatedManager.TargetPools = convertStringSet(d.Get("target_pools").(*schema.Set)) + change = true + } + + if d.HasChange("auto_healing_policies") { + updatedManager.AutoHealingPolicies = expandAutoHealingPolicies(d.Get("auto_healing_policies").([]interface{})) + updatedManager.ForceSendFields = append(updatedManager.ForceSendFields, "AutoHealingPolicies") + change = true + } + + if d.HasChange("version") { + updatedManager.Versions = expandVersions(d.Get("version").([]interface{})) + change = true + } + + if d.HasChange("update_policy") { + updatedManager.UpdatePolicy = expandUpdatePolicy(d.Get("update_policy").([]interface{})) + change = true + } + + if change { + op, err := config.clientComputeBeta.InstanceGroupManagers.Patch(project, zone, d.Get("name").(string), updatedManager).Do() + if err != nil { + return fmt.Errorf("Error updating managed group instances: %s", err) + } + + err = computeSharedOperationWait(config.clientCompute, op, project, "Updating managed group instances") + if err != nil { + return err + } + } + + // named ports can't be updated through PATCH + // so we call the update method on the instance group, instead of the 
igm + if d.HasChange("named_port") { + + // Build the parameters for a "SetNamedPorts" request: + namedPorts := getNamedPortsBeta(d.Get("named_port").(*schema.Set).List()) + setNamedPorts := &computeBeta.InstanceGroupsSetNamedPortsRequest{ + NamedPorts: namedPorts, + } + + // Make the request: + op, err := config.clientComputeBeta.InstanceGroups.SetNamedPorts( + project, zone, d.Get("name").(string), setNamedPorts).Do() + + if err != nil { + return fmt.Errorf("Error updating InstanceGroupManager: %s", err) + } + + // Wait for the operation to complete: + err = computeSharedOperationWait(config.clientCompute, op, project, "Updating InstanceGroupManager") + if err != nil { + return err + } + } + + // target_size should be updated through resize + if d.HasChange("target_size") { + targetSize := int64(d.Get("target_size").(int)) + op, err := config.clientComputeBeta.InstanceGroupManagers.Resize( + project, zone, d.Get("name").(string), targetSize).Do() + + if err != nil { + return fmt.Errorf("Error updating InstanceGroupManager: %s", err) + } + + // Wait for the operation to complete + err = computeSharedOperationWait(config.clientCompute, op, project, "Updating InstanceGroupManager") + if err != nil { + return err + } + } + + return resourceComputeInstanceGroupManagerRead(d, meta) +} +<% end -%> func resourceComputeInstanceGroupManagerDelete(d *schema.ResourceData, meta interface{}) error { config := meta.(*Config) @@ -823,24 +992,36 @@ func expandUpdatePolicy(configured []interface{}) *computeBeta.InstanceGroupMana if v := data["max_surge_percent"]; v.(int) > 0 { updatePolicy.MaxSurge = &computeBeta.FixedOrPercent{ Percent: int64(v.(int)), +<% unless version.nil? || version == 'ga' -%> + NullFields: []string{"Fixed"}, +<% end -%> } } else { updatePolicy.MaxSurge = &computeBeta.FixedOrPercent{ Fixed: int64(data["max_surge_fixed"].(int)), // allow setting this value to 0 ForceSendFields: []string{"Fixed"}, +<% unless version.nil? 
|| version == 'ga' -%> + NullFields: []string{"Percent"}, +<% end -%> } } if v := data["max_unavailable_percent"]; v.(int) > 0 { updatePolicy.MaxUnavailable = &computeBeta.FixedOrPercent{ Percent: int64(v.(int)), +<% unless version.nil? || version == 'ga' -%> + NullFields: []string{"Fixed"}, +<% end -%> } } else { updatePolicy.MaxUnavailable = &computeBeta.FixedOrPercent{ Fixed: int64(data["max_unavailable_fixed"].(int)), // allow setting this value to 0 ForceSendFields: []string{"Fixed"}, +<% unless version.nil? || version == 'ga' -%> + NullFields: []string{"Percent"}, +<% end -%> } } @@ -864,6 +1045,34 @@ func flattenAutoHealingPolicies(autoHealingPolicies []*computeBeta.InstanceGroup return autoHealingPoliciesSchema } +<% unless version.nil? || version == 'ga' -%> +func flattenUpdatePolicy(updatePolicy *computeBeta.InstanceGroupManagerUpdatePolicy) []map[string]interface{} { + results := []map[string]interface{}{} + if updatePolicy != nil { + up := map[string]interface{}{} + if updatePolicy.MaxSurge != nil { + up["max_surge_fixed"] = updatePolicy.MaxSurge.Fixed + up["max_surge_percent"] = updatePolicy.MaxSurge.Percent + } else { + up["max_surge_fixed"] = 0 + up["max_surge_percent"] = 0 + } + if updatePolicy.MaxUnavailable != nil { + up["max_unavailable_fixed"] = updatePolicy.MaxUnavailable.Fixed + up["max_unavailable_percent"] = updatePolicy.MaxUnavailable.Percent + } else { + up["max_unavailable_fixed"] = 0 + up["max_unavailable_percent"] = 0 + } + up["min_ready_sec"] = updatePolicy.MinReadySec + up["minimal_action"] = updatePolicy.MinimalAction + up["type"] = updatePolicy.Type + results = append(results, up) + } + return results +} +<% end -%> + func resourceInstanceGroupManagerStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { d.Set("wait_for_instances", false) zonalID, err := parseInstanceGroupManagerId(d.Id()) diff --git a/provider/terraform/resources/resource_compute_network.go 
b/provider/terraform/resources/resource_compute_network.go index db1efeba9c88..c30054e9901f 100644 --- a/provider/terraform/resources/resource_compute_network.go +++ b/provider/terraform/resources/resource_compute_network.go @@ -54,7 +54,7 @@ func resourceComputeNetwork() *schema.Resource { Optional: true, ForceNew: true, // This needs to remain deprecated until the API is retired - Removed: "Please use google_compute_subnetwork resources instead.", + Deprecated: "Please use google_compute_subnetwork resources instead.", }, "project": &schema.Schema{ @@ -89,6 +89,9 @@ func resourceComputeNetworkCreate(d *schema.ResourceData, meta interface{}) erro // - 2.b - Custom subnet mode - auto_create_subnetworks = false & ipv4_range not set, // autoCreateSubnetworks := d.Get("auto_create_subnetworks").(bool) + if autoCreateSubnetworks && d.Get("ipv4_range").(string) != "" { + return fmt.Errorf("ipv4_range can't be set if auto_create_subnetworks is true.") + } // Build the network parameter network := &compute.Network{ @@ -104,10 +107,14 @@ func resourceComputeNetworkCreate(d *schema.ResourceData, meta interface{}) erro network.RoutingConfig = routingConfig } - // make sure AutoCreateSubnetworks field is included in request otherwise - // google will create a network in legacy mode. - network.ForceSendFields = []string{"AutoCreateSubnetworks"} - + if v, ok := d.GetOk("ipv4_range"); ok { + log.Printf("[DEBUG] Setting IPv4Range (%#v) for legacy network mode", v.(string)) + network.IPv4Range = v.(string) + } else { + // custom subnet mode, so make sure AutoCreateSubnetworks field is included in request otherwise + // google will create a network in legacy mode. 
+ network.ForceSendFields = []string{"AutoCreateSubnetworks"} + } log.Printf("[DEBUG] Network insert request: %#v", network) op, err := config.clientCompute.Networks.Insert( project, network).Do() @@ -144,6 +151,7 @@ func resourceComputeNetworkRead(d *schema.ResourceData, meta interface{}) error d.Set("routing_mode", routingConfig.RoutingMode) d.Set("gateway_ipv4", network.GatewayIPv4) + d.Set("ipv4_range", network.IPv4Range) d.Set("self_link", network.SelfLink) d.Set("name", network.Name) d.Set("description", network.Description) diff --git a/provider/terraform/resources/resource_compute_region_instance_group_manager.go b/provider/terraform/resources/resource_compute_region_instance_group_manager.go.erb similarity index 81% rename from provider/terraform/resources/resource_compute_region_instance_group_manager.go rename to provider/terraform/resources/resource_compute_region_instance_group_manager.go.erb index b11f7dfc29cf..14c059911022 100644 --- a/provider/terraform/resources/resource_compute_region_instance_group_manager.go +++ b/provider/terraform/resources/resource_compute_region_instance_group_manager.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -42,17 +43,23 @@ func resourceComputeRegionInstanceGroupManager() *schema.Resource { ForceNew: true, }, +<% if version.nil? || version == 'ga' -%> "instance_template": &schema.Schema{ Type: schema.TypeString, Optional: true, DiffSuppressFunc: compareSelfLinkRelativePaths, }, +<% end -%> "version": &schema.Schema{ Type: schema.TypeList, +<% if version.nil? || version == 'ga' -%> Optional: true, Computed: true, Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. 
See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% else -%> + Required: true, +<% end -%> Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "name": &schema.Schema{ @@ -118,7 +125,11 @@ func resourceComputeRegionInstanceGroupManager() *schema.Resource { }, "named_port": &schema.Schema{ +<% if version.nil? || version == 'ga' -%> Type: schema.TypeList, +<% else -%> + Type: schema.TypeSet, +<% end -%> Optional: true, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -147,12 +158,14 @@ func resourceComputeRegionInstanceGroupManager() *schema.Resource { Computed: true, }, +<% if version.nil? || version == 'ga' -%> "update_strategy": &schema.Schema{ Type: schema.TypeString, Optional: true, Default: "NONE", ValidateFunc: validation.StringInSlice([]string{"NONE", "ROLLING_UPDATE"}, false), }, +<% end -%> "target_pools": &schema.Schema{ Type: schema.TypeSet, @@ -181,7 +194,9 @@ func resourceComputeRegionInstanceGroupManager() *schema.Resource { Type: schema.TypeList, Optional: true, MaxItems: 1, +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "health_check": &schema.Schema{ @@ -211,11 +226,16 @@ func resourceComputeRegionInstanceGroupManager() *schema.Resource { }, }, +<% if version.nil? || version == 'ga' -%> "rolling_update_policy": &schema.Schema{ + Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. 
See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% else -%> + "update_policy": &schema.Schema{ + Computed: true, +<% end -%> Type: schema.TypeList, Optional: true, MaxItems: 1, - Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "minimal_action": &schema.Schema{ @@ -233,28 +253,46 @@ func resourceComputeRegionInstanceGroupManager() *schema.Resource { "max_surge_fixed": &schema.Schema{ Type: schema.TypeInt, Optional: true, +<% if version.nil? || version == 'ga' -%> Default: 0, ConflictsWith: []string{"rolling_update_policy.0.max_surge_percent"}, +<% else -%> + Computed: true, + ConflictsWith: []string{"update_policy.0.max_surge_percent"}, +<% end -%> }, "max_surge_percent": &schema.Schema{ Type: schema.TypeInt, Optional: true, +<% if version.nil? || version == 'ga' -%> ConflictsWith: []string{"rolling_update_policy.0.max_surge_fixed"}, +<% else -%> + ConflictsWith: []string{"update_policy.0.max_surge_fixed"}, +<% end -%> ValidateFunc: validation.IntBetween(0, 100), }, "max_unavailable_fixed": &schema.Schema{ Type: schema.TypeInt, Optional: true, +<% if version.nil? || version == 'ga' -%> Default: 0, ConflictsWith: []string{"rolling_update_policy.0.max_unavailable_percent"}, +<% else -%> + Computed: true, + ConflictsWith: []string{"update_policy.0.max_unavailable_percent"}, +<% end -%> }, "max_unavailable_percent": &schema.Schema{ Type: schema.TypeInt, Optional: true, +<% if version.nil? 
|| version == 'ga' -%> ConflictsWith: []string{"rolling_update_policy.0.max_unavailable_fixed"}, +<% else -%> + ConflictsWith: []string{"update_policy.0.max_unavailable_fixed"}, +<% end -%> ValidateFunc: validation.IntBetween(0, 100), }, @@ -283,20 +321,31 @@ func resourceComputeRegionInstanceGroupManagerCreate(d *schema.ResourceData, met return err } +<% if version.nil? || version == 'ga' -%> if _, ok := d.GetOk("rolling_update_policy"); d.Get("update_strategy") == "ROLLING_UPDATE" && !ok { return fmt.Errorf("[rolling_update_policy] must be set when 'update_strategy' is set to 'ROLLING_UPDATE'") } +<% end -%> manager := &computeBeta.InstanceGroupManager{ Name: d.Get("name").(string), Description: d.Get("description").(string), BaseInstanceName: d.Get("base_instance_name").(string), +<% if version.nil? || version == 'ga' -%> InstanceTemplate: d.Get("instance_template").(string), +<% end -%> TargetSize: int64(d.Get("target_size").(int)), +<% if version.nil? || version == 'ga' -%> NamedPorts: getNamedPortsBeta(d.Get("named_port").([]interface{})), +<% else -%> + NamedPorts: getNamedPortsBeta(d.Get("named_port").(*schema.Set).List()), +<% end -%> TargetPools: convertStringSet(d.Get("target_pools").(*schema.Set)), AutoHealingPolicies: expandAutoHealingPolicies(d.Get("auto_healing_policies").([]interface{})), Versions: expandVersions(d.Get("version").([]interface{})), +<% unless version.nil? || version == 'ga' -%> + UpdatePolicy: expandUpdatePolicy(d.Get("update_policy").([]interface{})), +<% end -%> DistributionPolicy: expandDistributionPolicy(d.Get("distribution_policy_zones").(*schema.Set)), // Force send TargetSize to allow size of 0. 
ForceSendFields: []string{"TargetSize"}, @@ -368,9 +417,14 @@ func waitForInstancesRefreshFunc(f getInstanceManagerFunc, d *schema.ResourceDat func resourceComputeRegionInstanceGroupManagerRead(d *schema.ResourceData, meta interface{}) error { config := meta.(*Config) manager, err := getRegionalManager(d, meta) - if err != nil || manager == nil { + if err != nil { return err } + if manager == nil { + log.Printf("[WARN] Region Instance Group Manager %q not found, removing from state.", d.Id()) + d.SetId("") + return nil + } regionalID, err := parseRegionInstanceGroupManagerId(d.Id()) if err != nil { @@ -384,7 +438,9 @@ func resourceComputeRegionInstanceGroupManagerRead(d *schema.ResourceData, meta } d.Set("base_instance_name", manager.BaseInstanceName) - d.Set("instance_template", manager.InstanceTemplate) +<% if version.nil? || version == 'ga' -%> + d.Set("instance_template", ConvertSelfLinkToV1(manager.InstanceTemplate)) +<% end -%> if err := d.Set("version", flattenVersions(manager.Versions)); err != nil { return err } @@ -393,20 +449,32 @@ func resourceComputeRegionInstanceGroupManagerRead(d *schema.ResourceData, meta d.Set("description", manager.Description) d.Set("project", regionalID.Project) d.Set("target_size", manager.TargetSize) - d.Set("target_pools", manager.TargetPools) - d.Set("named_port", flattenNamedPortsBeta(manager.NamedPorts)) + if err := d.Set("target_pools", manager.TargetPools); err != nil { + return fmt.Errorf("Error setting target_pools in state: %s", err.Error()) + } + if err := d.Set("named_port", flattenNamedPortsBeta(manager.NamedPorts)); err != nil { + return fmt.Errorf("Error setting named_port in state: %s", err.Error()) + } d.Set("fingerprint", manager.Fingerprint) d.Set("instance_group", ConvertSelfLinkToV1(manager.InstanceGroup)) - d.Set("auto_healing_policies", flattenAutoHealingPolicies(manager.AutoHealingPolicies)) + if err := d.Set("auto_healing_policies", flattenAutoHealingPolicies(manager.AutoHealingPolicies)); err != nil { 
+ return fmt.Errorf("Error setting auto_healing_policies in state: %s", err.Error()) + } if err := d.Set("distribution_policy_zones", flattenDistributionPolicy(manager.DistributionPolicy)); err != nil { return err } d.Set("self_link", ConvertSelfLinkToV1(manager.SelfLink)) +<% if version.nil? || version == 'ga' -%> update_strategy, ok := d.GetOk("update_strategy") if !ok { update_strategy = "NONE" } d.Set("update_strategy", update_strategy.(string)) +<% else -%> + if err := d.Set("update_policy", flattenUpdatePolicy(manager.UpdatePolicy)); err != nil { + return fmt.Errorf("Error setting update_policy in state: %s", err.Error()) + } +<% end -%> if d.Get("wait_for_instances").(bool) { conf := resource.StateChangeConf{ @@ -424,6 +492,7 @@ func resourceComputeRegionInstanceGroupManagerRead(d *schema.ResourceData, meta return nil } +<% if version.nil? || version == 'ga' -%> // Updates an instance group manager by applying the update strategy (REPLACE, RESTART) // and rolling update policy (PROACTIVE, OPPORTUNISTIC). Updates performed by API // are OPPORTUNISTIC by default. 
@@ -631,6 +700,98 @@ func resourceComputeRegionInstanceGroupManagerUpdate(d *schema.ResourceData, met return resourceComputeRegionInstanceGroupManagerRead(d, meta) } +<% else -%> +func resourceComputeRegionInstanceGroupManagerUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + project, err := getProject(d, config) + if err != nil { + return err + } + + region, err := getRegion(d, config) + if err != nil { + return err + } + + updatedManager := &computeBeta.InstanceGroupManager{ + Fingerprint: d.Get("fingerprint").(string), + } + var change bool + + if d.HasChange("target_pools") { + updatedManager.TargetPools = convertStringSet(d.Get("target_pools").(*schema.Set)) + change = true + } + + if d.HasChange("auto_healing_policies") { + updatedManager.AutoHealingPolicies = expandAutoHealingPolicies(d.Get("auto_healing_policies").([]interface{})) + updatedManager.ForceSendFields = append(updatedManager.ForceSendFields, "AutoHealingPolicies") + change = true + } + + if d.HasChange("version") { + updatedManager.Versions = expandVersions(d.Get("version").([]interface{})) + change = true + } + + if d.HasChange("update_policy") { + updatedManager.UpdatePolicy = expandUpdatePolicy(d.Get("update_policy").([]interface{})) + change = true + } + + if change { + op, err := config.clientComputeBeta.RegionInstanceGroupManagers.Patch(project, region, d.Get("name").(string), updatedManager).Do() + if err != nil { + return fmt.Errorf("Error updating region managed group instances: %s", err) + } + + err = computeSharedOperationWait(config.clientCompute, op, project, "Updating region managed group instances") + if err != nil { + return err + } + } + + // named ports can't be updated through PATCH + // so we call the update method on the region instance group, instead of the rigm + if d.HasChange("named_port") { + namedPorts := getNamedPortsBeta(d.Get("named_port").(*schema.Set).List()) + setNamedPorts := 
&computeBeta.RegionInstanceGroupsSetNamedPortsRequest{ + NamedPorts: namedPorts, + } + + op, err := config.clientComputeBeta.RegionInstanceGroups.SetNamedPorts( + project, region, d.Get("name").(string), setNamedPorts).Do() + + if err != nil { + return fmt.Errorf("Error updating RegionInstanceGroupManager: %s", err) + } + + err = computeSharedOperationWait(config.clientCompute, op, project, "Updating RegionInstanceGroupManager") + if err != nil { + return err + } + } + + // target size should use resize + if d.HasChange("target_size") { + targetSize := int64(d.Get("target_size").(int)) + op, err := config.clientComputeBeta.RegionInstanceGroupManagers.Resize( + project, region, d.Get("name").(string), targetSize).Do() + + if err != nil { + return fmt.Errorf("Error resizing RegionInstanceGroupManager: %s", err) + } + + err = computeSharedOperationWait(config.clientCompute, op, project, "Resizing RegionInstanceGroupManager") + if err != nil { + return err + } + } + + return resourceComputeRegionInstanceGroupManagerRead(d, meta) +} +<% end -%> func resourceComputeRegionInstanceGroupManagerDelete(d *schema.ResourceData, meta interface{}) error { config := meta.(*Config) diff --git a/provider/terraform/resources/resource_container_cluster.go b/provider/terraform/resources/resource_container_cluster.go.erb similarity index 94% rename from provider/terraform/resources/resource_container_cluster.go rename to provider/terraform/resources/resource_container_cluster.go.erb index e686f9e656d0..81f1281d9fb8 100644 --- a/provider/terraform/resources/resource_container_cluster.go +++ b/provider/terraform/resources/resource_container_cluster.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -97,7 +98,9 @@ func resourceContainerCluster() *schema.Resource { }, "region": { +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. 
See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeString, Optional: true, Computed: true, @@ -202,7 +205,9 @@ func resourceContainerCluster() *schema.Resource { }, "enable_binary_authorization": { +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeBool, Optional: true, Default: false, @@ -216,7 +221,9 @@ func resourceContainerCluster() *schema.Resource { }, "enable_tpu": { +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeBool, Optional: true, ForceNew: true, @@ -395,7 +402,9 @@ func resourceContainerCluster() *schema.Resource { }, "pod_security_policy_config": { +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeList, Optional: true, MaxItems: 1, @@ -505,15 +514,65 @@ func resourceContainerCluster() *schema.Resource { }, "private_cluster": { +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. 
See https://terraform.io/docs/providers/google/provider_versions.html for more details.", + Default: false, +<% else -%> + Deprecated: "Use private_cluster_config.enable_private_nodes instead.", + ConflictsWith: []string{"private_cluster_config"}, + Computed: true, +<% end -%> Type: schema.TypeBool, Optional: true, ForceNew: true, - Default: false, }, +<% unless version.nil? || version == 'ga' -%> + "private_cluster_config": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Computed: true, + ConflictsWith: []string{"private_cluster", "master_ipv4_cidr_block"}, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enable_private_endpoint": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + }, + "enable_private_nodes": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + }, + "master_ipv4_cidr_block": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.CIDRNetwork(28, 28), + }, + "private_endpoint": { + Type: schema.TypeString, + Computed: true, + }, + "public_endpoint": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, +<% end -%> + "master_ipv4_cidr_block": { - Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% if version.nil? || version == 'ga' -%> + Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. 
See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% else -%> + Deprecated: "Use private_cluster_config.master_ipv4_cidr_block instead.", + ConflictsWith: []string{"private_cluster_config"}, + Computed: true, +<% end -%> Type: schema.TypeString, Optional: true, ForceNew: true, @@ -651,6 +710,12 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er } } +<% unless version.nil? || version == 'ga' -%> + if v, ok := d.GetOk("private_cluster_config"); ok { + cluster.PrivateClusterConfig = expandPrivateClusterConfig(v) + } +<% end -%> + req := &containerBeta.CreateClusterRequest{ Cluster: cluster, } @@ -773,6 +838,12 @@ func resourceContainerClusterRead(d *schema.ResourceData, meta interface{}) erro return err } +<% unless version.nil? || version == 'ga' -%> + if err := d.Set("private_cluster_config", flattenPrivateClusterConfig(cluster.PrivateClusterConfig)); err != nil { + return err + } +<% end -%> + igUrls, err := getInstanceGroupUrlsFromManagerUrls(config, cluster.InstanceGroupUrls) if err != nil { return err @@ -1509,6 +1580,22 @@ func expandNetworkPolicy(configured interface{}) *containerBeta.NetworkPolicy { return result } +<% unless version.nil? 
|| version == 'ga' -%> +func expandPrivateClusterConfig(configured interface{}) *containerBeta.PrivateClusterConfig { + l := configured.([]interface{}) + if len(l) == 0 { + return nil + } + config := l[0].(map[string]interface{}) + return &containerBeta.PrivateClusterConfig{ + EnablePrivateEndpoint: config["enable_private_endpoint"].(bool), + EnablePrivateNodes: config["enable_private_nodes"].(bool), + MasterIpv4CidrBlock: config["master_ipv4_cidr_block"].(string), + ForceSendFields: []string{"EnablePrivateEndpoint", "EnablePrivateNodes", "MasterIpv4CidrBlock"}, + } +} +<% end -%> + func expandPodSecurityPolicyConfig(configured interface{}) *containerBeta.PodSecurityPolicyConfig { l := configured.([]interface{}) if len(l) == 0 || l[0] == nil { @@ -1590,6 +1677,23 @@ func flattenClusterNodePools(d *schema.ResourceData, config *Config, c []*contai return nodePools, nil } +<% unless version.nil? || version == 'ga' -%> +func flattenPrivateClusterConfig(c *containerBeta.PrivateClusterConfig) []map[string]interface{} { + if c == nil { + return nil + } + return []map[string]interface{}{ + { + "enable_private_endpoint": c.EnablePrivateEndpoint, + "enable_private_nodes": c.EnablePrivateNodes, + "master_ipv4_cidr_block": c.MasterIpv4CidrBlock, + "private_endpoint": c.PrivateEndpoint, + "public_endpoint": c.PublicEndpoint, + }, + } +} +<% end -%> + func flattenIPAllocationPolicy(c *containerBeta.IPAllocationPolicy) []map[string]interface{} { if c == nil { return nil diff --git a/provider/terraform/resources/resource_container_node_pool.go b/provider/terraform/resources/resource_container_node_pool.go.erb similarity index 99% rename from provider/terraform/resources/resource_container_node_pool.go rename to provider/terraform/resources/resource_container_node_pool.go.erb index 009fc788d822..90d548b80e90 100644 --- a/provider/terraform/resources/resource_container_node_pool.go +++ b/provider/terraform/resources/resource_container_node_pool.go.erb @@ -1,3 +1,4 @@ +<% 
autogen_exception -%> package google import ( @@ -54,7 +55,9 @@ func resourceContainerNodePool() *schema.Resource { ForceNew: true, }, "region": &schema.Schema{ +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeString, Optional: true, ForceNew: true, @@ -86,7 +89,9 @@ var schemaNodePool = map[string]*schema.Schema{ }, "max_pods_per_node": &schema.Schema{ +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeInt, Optional: true, ForceNew: true, diff --git a/provider/terraform/tests/data_source_container_registry_test.go b/provider/terraform/tests/data_source_container_registry_test.go new file mode 100644 index 000000000000..0829a5d82023 --- /dev/null +++ b/provider/terraform/tests/data_source_container_registry_test.go @@ -0,0 +1,78 @@ +package google + +import ( + "testing" + + "github.com/hashicorp/terraform/helper/resource" +) + +func TestDataSourceGoogleContainerRegistryRepository(t *testing.T) { + t.Parallel() + + resourceName := "data.google_container_registry_repository.default" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleContainerRegistryRepo_basic, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, "project"), + resource.TestCheckResourceAttrSet(resourceName, "region"), + resource.TestCheckResourceAttr(resourceName, "repository_url", "bar.gcr.io/foo"), + ), + }, + }, + }) +} + +const testAccCheckGoogleContainerRegistryRepo_basic = 
` +data "google_container_registry_repository" "default" { + project = "foo" + region = "bar" +} +` + +func TestDataSourceGoogleContainerRegistryImage(t *testing.T) { + t.Parallel() + + resourceName := "data.google_container_registry_image.test" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleContainerRegistryImage_basic, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, "project"), + resource.TestCheckResourceAttrSet(resourceName, "region"), + resource.TestCheckResourceAttr(resourceName, "image_url", "bar.gcr.io/foo/baz"), + resource.TestCheckResourceAttr(resourceName+"2", "image_url", "bar.gcr.io/foo/baz:qux"), + resource.TestCheckResourceAttr(resourceName+"3", "image_url", "bar.gcr.io/foo/baz@1234"), + ), + }, + }, + }) +} + +const testAccCheckGoogleContainerRegistryImage_basic = ` +data "google_container_registry_image" "test" { + project = "foo" + region = "bar" + name = "baz" +} +data "google_container_registry_image" "test2" { + project = "foo" + region = "bar" + name = "baz" + tag = "qux" +} +data "google_container_registry_image" "test3" { + project = "foo" + region = "bar" + name = "baz" + digest = "1234" +} +` diff --git a/provider/terraform/tests/data_source_dns_managed_zone_test.go b/provider/terraform/tests/data_source_dns_managed_zone_test.go new file mode 100644 index 000000000000..a794c56c9101 --- /dev/null +++ b/provider/terraform/tests/data_source_dns_managed_zone_test.go @@ -0,0 +1,73 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccDataSourceDnsManagedZone_basic(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + 
CheckDestroy: testAccCheckDnsManagedZoneDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDataSourceDnsManagedZone_basic(), + Check: testAccDataSourceDnsManagedZoneCheck("data.google_dns_managed_zone.qa", "google_dns_managed_zone.foo"), + }, + }, + }) +} + +func testAccDataSourceDnsManagedZoneCheck(dsName, rsName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[rsName] + if !ok { + return fmt.Errorf("can't find resource called %s in state", rsName) + } + + rs, ok := s.RootModule().Resources[dsName] + if !ok { + return fmt.Errorf("can't find data source called %s in state", dsName) + } + + dsAttr := ds.Primary.Attributes + rsAttr := rs.Primary.Attributes + + attrsToTest := []string{ + "id", + "name", + "description", + "dns_name", + "name_servers", + } + + for _, attrToTest := range attrsToTest { + if dsAttr[attrToTest] != rsAttr[attrToTest] { + return fmt.Errorf("%s is %s; want %s", attrToTest, dsAttr[attrToTest], rsAttr[attrToTest]) + } + } + + return nil + } +} + +func testAccDataSourceDnsManagedZone_basic() string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foo" { + name = "qa-zone-%s" + dns_name = "qa.tf-test.club." 
+ description = "QA DNS zone" +} + +data "google_dns_managed_zone" "qa" { + name = "${google_dns_managed_zone.foo.name}" +} +`, acctest.RandString(10)) +} diff --git a/provider/terraform/tests/data_source_google_active_folder_test.go b/provider/terraform/tests/data_source_google_active_folder_test.go new file mode 100644 index 000000000000..d083c77ea58c --- /dev/null +++ b/provider/terraform/tests/data_source_google_active_folder_test.go @@ -0,0 +1,94 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccDataSourceGoogleActiveFolder_default(t *testing.T) { + org := getTestOrgFromEnv(t) + + parent := fmt.Sprintf("organizations/%s", org) + displayName := "terraform-test-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDataSourceGoogleActiveFolderConfig(parent, displayName), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceGoogleActiveFolderCheck("data.google_active_folder.my_folder", "google_folder.foobar"), + ), + }, + }, + }) +} + +func TestAccDataSourceGoogleActiveFolder_space(t *testing.T) { + org := getTestOrgFromEnv(t) + + parent := fmt.Sprintf("organizations/%s", org) + displayName := "terraform test " + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDataSourceGoogleActiveFolderConfig(parent, displayName), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceGoogleActiveFolderCheck("data.google_active_folder.my_folder", "google_folder.foobar"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleActiveFolderCheck(data_source_name string, resource_name string) 
resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[data_source_name] + if !ok { + return fmt.Errorf("root module has no resource called %s", data_source_name) + } + + rs, ok := s.RootModule().Resources[resource_name] + if !ok { + return fmt.Errorf("can't find %s in state", resource_name) + } + + ds_attr := ds.Primary.Attributes + rs_attr := rs.Primary.Attributes + folder_attrs_to_test := []string{"parent", "display_name", "name"} + + for _, attr_to_check := range folder_attrs_to_test { + if ds_attr[attr_to_check] != rs_attr[attr_to_check] { + return fmt.Errorf( + "%s is %s; want %s", + attr_to_check, + ds_attr[attr_to_check], + rs_attr[attr_to_check], + ) + } + } + return nil + } +} + +func testAccDataSourceGoogleActiveFolderConfig(parent string, displayName string) string { + return fmt.Sprintf(` +resource "google_folder" "foobar" { + parent = "%s" + display_name = "%s" +} + +data "google_active_folder" "my_folder" { + parent = "${google_folder.foobar.parent}" + display_name = "${google_folder.foobar.display_name}" +} +`, parent, displayName) +} diff --git a/provider/terraform/tests/data_source_google_billing_account_test.go b/provider/terraform/tests/data_source_google_billing_account_test.go new file mode 100644 index 000000000000..18352b6843d4 --- /dev/null +++ b/provider/terraform/tests/data_source_google_billing_account_test.go @@ -0,0 +1,104 @@ +package google + +import ( + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + + "github.com/hashicorp/terraform/helper/resource" +) + +func TestAccDataSourceGoogleBillingAccount_byFullName(t *testing.T) { + billingId := getTestBillingAccountFromEnv(t) + name := "billingAccounts/" + billingId + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleBillingAccount_byName(name), + Check: resource.ComposeTestCheckFunc( 
+ resource.TestCheckResourceAttr("data.google_billing_account.acct", "id", billingId), + resource.TestCheckResourceAttr("data.google_billing_account.acct", "name", name), + resource.TestCheckResourceAttr("data.google_billing_account.acct", "open", "true"), + ), + }, + }, + }) +} + +func TestAccDataSourceGoogleBillingAccount_byShortName(t *testing.T) { + billingId := getTestBillingAccountFromEnv(t) + name := "billingAccounts/" + billingId + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleBillingAccount_byName(billingId), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_billing_account.acct", "id", billingId), + resource.TestCheckResourceAttr("data.google_billing_account.acct", "name", name), + resource.TestCheckResourceAttr("data.google_billing_account.acct", "open", "true"), + ), + }, + }, + }) +} + +func TestAccDataSourceGoogleBillingAccount_byFullNameClosed(t *testing.T) { + billingId := getTestBillingAccountFromEnv(t) + name := "billingAccounts/" + billingId + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleBillingAccount_byNameClosed(name), + ExpectError: regexp.MustCompile("Billing account not found: " + name), + }, + }, + }) +} + +func TestAccDataSourceGoogleBillingAccount_byDisplayName(t *testing.T) { + name := acctest.RandString(16) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleBillingAccount_byDisplayName(name), + ExpectError: regexp.MustCompile("Billing account not found: " + name), + }, + }, + }) +} + +func testAccCheckGoogleBillingAccount_byName(name string) string { + return fmt.Sprintf(` +data "google_billing_account" "acct" { + 
billing_account = "%s" +}`, name) +} + +func testAccCheckGoogleBillingAccount_byNameClosed(name string) string { + return fmt.Sprintf(` +data "google_billing_account" "acct" { + billing_account = "%s" + open = false +}`, name) +} + +func testAccCheckGoogleBillingAccount_byDisplayName(name string) string { + return fmt.Sprintf(` +data "google_billing_account" "acct" { + display_name = "%s" +}`, name) +} diff --git a/provider/terraform/tests/data_source_google_client_config_test.go b/provider/terraform/tests/data_source_google_client_config_test.go new file mode 100644 index 000000000000..b6b355e5c20d --- /dev/null +++ b/provider/terraform/tests/data_source_google_client_config_test.go @@ -0,0 +1,32 @@ +package google + +import ( + "testing" + + "github.com/hashicorp/terraform/helper/resource" +) + +func TestAccDataSourceGoogleClientConfig_basic(t *testing.T) { + t.Parallel() + + resourceName := "data.google_client_config.current" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleClientConfig_basic, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, "project"), + resource.TestCheckResourceAttrSet(resourceName, "region"), + resource.TestCheckResourceAttrSet(resourceName, "access_token"), + ), + }, + }, + }) +} + +const testAccCheckGoogleClientConfig_basic = ` +data "google_client_config" "current" { } +` diff --git a/provider/terraform/tests/data_source_google_cloudfunctions_function_test.go b/provider/terraform/tests/data_source_google_cloudfunctions_function_test.go new file mode 100644 index 000000000000..e743e857ba4e --- /dev/null +++ b/provider/terraform/tests/data_source_google_cloudfunctions_function_test.go @@ -0,0 +1,154 @@ +package google + +import ( + "fmt" + "os" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + 
"github.com/hashicorp/terraform/terraform" +) + +func TestAccDataSourceGoogleCloudFunctionsFunction_basic(t *testing.T) { + t.Parallel() + + funcDataNameHttp := "data.google_cloudfunctions_function.function_http" + funcDataNamePubSub := "data.google_cloudfunctions_function.function_pubsub" + funcDataNameBucket := "data.google_cloudfunctions_function.function_bucket" + functionName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + bucketName := fmt.Sprintf("tf-test-bucket-%d", acctest.RandInt()) + topicName := fmt.Sprintf("tf-test-sub-%s", acctest.RandString(10)) + zipFilePath, err := createZIPArchiveForIndexJs(testHTTPTriggerPath) + if err != nil { + t.Fatal(err.Error()) + } + defer os.Remove(zipFilePath) // clean up + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckCloudFunctionsFunctionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleCloudFunctionsFunctionConfig(functionName, + bucketName, zipFilePath, topicName), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceGoogleCloudFunctionsFunctionCheck(funcDataNameHttp, + "google_cloudfunctions_function.function_http"), + testAccDataSourceGoogleCloudFunctionsFunctionCheck(funcDataNamePubSub, + "google_cloudfunctions_function.function_pubsub"), + testAccDataSourceGoogleCloudFunctionsFunctionCheck(funcDataNameBucket, + "google_cloudfunctions_function.function_bucket"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleCloudFunctionsFunctionCheck(dataSourceName string, resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[dataSourceName] + if !ok { + return fmt.Errorf("root module has no resource called %s", dataSourceName) + } + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("can't find %s in state", resourceName) + } + + dsAttr := ds.Primary.Attributes + rsAttr := 
rs.Primary.Attributes + + cloudFuncAttrToCheck := []string{ + "name", + "region", + "description", + "available_memory_mb", + "timeout", + "storage_bucket", + "storage_object", + "entry_point", + "trigger_http", + "trigger_bucket", + "trigger_topic", + } + + for _, attr := range cloudFuncAttrToCheck { + if dsAttr[attr] != rsAttr[attr] { + return fmt.Errorf( + "%s is %s; want %s", + attr, + dsAttr[attr], + rsAttr[attr], + ) + } + } + + return nil + } +} + +func testAccDataSourceGoogleCloudFunctionsFunctionConfig(functionName string, + bucketName string, zipFilePath string, topicName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "archive" { + name = "index.zip" + bucket = "${google_storage_bucket.bucket.name}" + source = "%s" +} + +resource "google_cloudfunctions_function" "function_http" { + name = "%s-http" + description = "test function" + available_memory_mb = 128 + source_archive_bucket = "${google_storage_bucket.bucket.name}" + source_archive_object = "${google_storage_bucket_object.archive.name}" + trigger_http = true + timeout = 61 + entry_point = "helloGET" +} + +resource "google_cloudfunctions_function" "function_bucket" { + name = "%s-bucket" + available_memory_mb = 128 + source_archive_bucket = "${google_storage_bucket.bucket.name}" + source_archive_object = "${google_storage_bucket_object.archive.name}" + trigger_bucket = "${google_storage_bucket.bucket.name}" + timeout = 61 + entry_point = "helloGET" +} + +resource "google_pubsub_topic" "sub" { + name = "%s" +} + +resource "google_cloudfunctions_function" "function_pubsub" { + name = "%s-pubsub" + available_memory_mb = 128 + source_archive_bucket = "${google_storage_bucket.bucket.name}" + source_archive_object = "${google_storage_bucket_object.archive.name}" + trigger_topic = "${google_pubsub_topic.sub.name}" + timeout = 61 + entry_point = "helloGET" +} + +data "google_cloudfunctions_function" 
"function_http" { + name = "${google_cloudfunctions_function.function_http.name}" +} + +data "google_cloudfunctions_function" "function_bucket" { + name = "${google_cloudfunctions_function.function_bucket.name}" +} + +data "google_cloudfunctions_function" "function_pubsub" { + name = "${google_cloudfunctions_function.function_pubsub.name}" +} +`, bucketName, zipFilePath, functionName, functionName, + topicName, functionName) +} diff --git a/provider/terraform/tests/data_source_google_compute_instance_group_test.go b/provider/terraform/tests/data_source_google_compute_instance_group_test.go.erb similarity index 97% rename from provider/terraform/tests/data_source_google_compute_instance_group_test.go rename to provider/terraform/tests/data_source_google_compute_instance_group_test.go.erb index bc42d7805093..4c4bab6e98bc 100644 --- a/provider/terraform/tests/data_source_google_compute_instance_group_test.go +++ b/provider/terraform/tests/data_source_google_compute_instance_group_test.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -311,7 +312,14 @@ resource "google_compute_instance_template" "igm-basic" { resource "google_compute_instance_group_manager" "igm" { name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + name = "primary" + } +<% end -%> base_instance_name = "igm" zone = "us-central1-a" target_size = 10 diff --git a/provider/terraform/tests/data_source_google_container_cluster_test.go b/provider/terraform/tests/data_source_google_container_cluster_test.go new file mode 100644 index 000000000000..414735f2caca --- /dev/null +++ b/provider/terraform/tests/data_source_google_container_cluster_test.go @@ -0,0 +1,142 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccContainerClusterDatasource_zonal(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccContainerClusterDatasource_zonal(), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceGoogleContainerClusterCheck("data.google_container_cluster.kubes", "google_container_cluster.kubes"), + ), + }, + }, + }) +} + +func TestAccContainerClusterDatasource_regional(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccContainerClusterDatasource_regional(), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceGoogleContainerClusterCheck("data.google_container_cluster.kubes", "google_container_cluster.kubes"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleContainerClusterCheck(dataSourceName string, resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[dataSourceName] + if !ok { + return 
fmt.Errorf("root module has no resource called %s", dataSourceName) + } + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("can't find %s in state", resourceName) + } + + dsAttr := ds.Primary.Attributes + rsAttr := rs.Primary.Attributes + + clusterAttrToCheck := []string{ + "name", + "zone", + "additional_zones", + "addons_config", + "cluster_ipv4_cidr", + "description", + "enable_kubernetes_alpha", + "enable_tpu", + "enable_legacy_abac", + "endpoint", + "enable_legacy_abac", + "instance_group_urls", + "ip_allocation_policy", + "logging_service", + "maintenance_policy", + "master_auth", + "master_auth.0.password", + "master_auth.0.username", + "master_auth.0.client_certificate_config.0.issue_client_certificate", + "master_auth.0.client_certificate", + "master_auth.0.client_key", + "master_auth.0.cluster_ca_certificate", + "master_authorized_networks_config", + "master_version", + "min_master_version", + "monitoring_service", + "network", + "network_policy", + "node_version", + "subnetwork", + } + + for _, attr := range clusterAttrToCheck { + if dsAttr[attr] != rsAttr[attr] { + return fmt.Errorf( + "%s is %s; want %s", + attr, + dsAttr[attr], + rsAttr[attr], + ) + } + } + + return nil + } +} + +func testAccContainerClusterDatasource_zonal() string { + return fmt.Sprintf(` +resource "google_container_cluster" "kubes" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + master_auth { + username = "mr.yoda" + password = "adoy.rm.123456789" + } +} + +data "google_container_cluster" "kubes" { + name = "${google_container_cluster.kubes.name}" + zone = "${google_container_cluster.kubes.zone}" +} +`, acctest.RandString(10)) +} + +func testAccContainerClusterDatasource_regional() string { + return fmt.Sprintf(` +resource "google_container_cluster" "kubes" { + name = "cluster-test-%s" + region = "us-central1" + initial_node_count = 1 +} + +data "google_container_cluster" "kubes" { + name = 
"${google_container_cluster.kubes.name}" + region = "${google_container_cluster.kubes.region}" +} +`, acctest.RandString(10)) +} diff --git a/provider/terraform/tests/data_source_google_container_engine_versions_test.go b/provider/terraform/tests/data_source_google_container_engine_versions_test.go new file mode 100644 index 000000000000..4a6cef8b6b28 --- /dev/null +++ b/provider/terraform/tests/data_source_google_container_engine_versions_test.go @@ -0,0 +1,127 @@ +package google + +import ( + "errors" + "fmt" + "strconv" + "testing" + + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccContainerEngineVersions_basic(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleContainerEngineVersionsConfig, + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleContainerEngineVersionsMeta("data.google_container_engine_versions.versions"), + ), + }, + }, + }) +} + +func TestAccContainerEngineVersions_regional(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleContainerEngineVersionsRegionalConfig, + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleContainerEngineVersionsMeta("data.google_container_engine_versions.versions"), + ), + }, + }, + }) +} + +func testAccCheckGoogleContainerEngineVersionsMeta(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Can't find versions data source: %s", n) + } + + if rs.Primary.ID == "" { + return errors.New("versions data source ID not set.") + } + + nodeCount, ok := rs.Primary.Attributes["valid_node_versions.#"] + if !ok { + return errors.New("can't find 
'valid_node_versions' attribute") + } + + noOfNodes, err := strconv.Atoi(nodeCount) + if err != nil { + return errors.New("failed to read number of valid node versions") + } + if noOfNodes < 2 { + return fmt.Errorf("expected at least 2 valid node versions, received %d, this is most likely a bug", + noOfNodes) + } + + for i := 0; i < noOfNodes; i++ { + idx := "valid_node_versions." + strconv.Itoa(i) + v, ok := rs.Primary.Attributes[idx] + if !ok { + return fmt.Errorf("valid node versions list is corrupt (%q not found), this is definitely a bug", idx) + } + if len(v) < 1 { + return fmt.Errorf("Empty node version (%q), this is definitely a bug", idx) + } + } + + masterCount, ok := rs.Primary.Attributes["valid_master_versions.#"] + if !ok { + return errors.New("can't find 'valid_master_versions' attribute") + } + + noOfMasters, err := strconv.Atoi(masterCount) + if err != nil { + return errors.New("failed to read number of valid master versions") + } + if noOfMasters < 1 { + return fmt.Errorf("expected at least 1 valid master versions, received %d, this is most likely a bug", + noOfMasters) + } + + for i := 0; i < noOfMasters; i++ { + idx := "valid_master_versions." 
+ strconv.Itoa(i) + v, ok := rs.Primary.Attributes[idx] + if !ok { + return fmt.Errorf("valid master versions list is corrupt (%q not found), this is definitely a bug", idx) + } + if len(v) < 1 { + return fmt.Errorf("Empty master version (%q), this is definitely a bug", idx) + } + } + + _, ok = rs.Primary.Attributes["default_cluster_version"] + if !ok { + return errors.New("Didn't get a default cluster version.") + } + + return nil + } +} + +var testAccCheckGoogleContainerEngineVersionsConfig = ` +data "google_container_engine_versions" "versions" { + zone = "us-central1-b" +} +` + +var testAccCheckGoogleContainerEngineVersionsRegionalConfig = ` +data "google_container_engine_versions" "versions" { + region = "us-central1" +} +` diff --git a/provider/terraform/tests/data_source_google_folder_test.go b/provider/terraform/tests/data_source_google_folder_test.go new file mode 100644 index 000000000000..655dfba08aaf --- /dev/null +++ b/provider/terraform/tests/data_source_google_folder_test.go @@ -0,0 +1,161 @@ +package google + +import ( + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccDataSourceGoogleFolder_byFullName(t *testing.T) { + org := getTestOrgFromEnv(t) + + parent := fmt.Sprintf("organizations/%s", org) + displayName := "terraform-test-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleFolder_byFullNameConfig(parent, displayName), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceGoogleFolderCheck("data.google_folder.folder", "google_folder.foobar"), + ), + }, + }, + }) +} + +func TestAccDataSourceGoogleFolder_byShortName(t *testing.T) { + org := getTestOrgFromEnv(t) + + parent := fmt.Sprintf("organizations/%s", org) + displayName := "terraform-test-" + 
acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleFolder_byShortNameConfig(parent, displayName), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceGoogleFolderCheck("data.google_folder.folder", "google_folder.foobar"), + ), + }, + }, + }) +} + +func TestAccDataSourceGoogleFolder_lookupOrganization(t *testing.T) { + org := getTestOrgFromEnv(t) + + parent := fmt.Sprintf("organizations/%s", org) + displayName := "terraform-test-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleFolder_lookupOrganizationConfig(parent, displayName), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceGoogleFolderCheck("data.google_folder.folder", "google_folder.foobar"), + resource.TestCheckResourceAttr("data.google_folder.folder", "organization", parent), + ), + }, + }, + }) +} + +func TestAccDataSourceGoogleFolder_byFullNameNotFound(t *testing.T) { + name := "folders/" + acctest.RandString(16) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleFolder_byFullNameNotFoundConfig(name), + ExpectError: regexp.MustCompile("Folder Not Found : " + name), + }, + }, + }) +} + +func testAccDataSourceGoogleFolderCheck(data_source_name string, resource_name string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[data_source_name] + if !ok { + return fmt.Errorf("root module has no resource called %s", data_source_name) + } + + rs, ok := s.RootModule().Resources[resource_name] + if !ok { + return fmt.Errorf("can't find %s in state", resource_name) + } + + ds_attr := ds.Primary.Attributes + rs_attr := 
rs.Primary.Attributes + folder_attrs_to_test := []string{"parent", "display_name", "name"} + + for _, attr_to_check := range folder_attrs_to_test { + if ds_attr[attr_to_check] != rs_attr[attr_to_check] { + return fmt.Errorf( + "%s is %s; want %s", + attr_to_check, + ds_attr[attr_to_check], + rs_attr[attr_to_check], + ) + } + } + return nil + } +} + +func testAccCheckGoogleFolder_byFullNameConfig(parent string, displayName string) string { + return fmt.Sprintf(` +resource "google_folder" "foobar" { + parent = "%s" + display_name = "%s" +} + +data "google_folder" "folder" { + folder = "${google_folder.foobar.name}" +}`, parent, displayName) +} + +func testAccCheckGoogleFolder_byShortNameConfig(parent string, displayName string) string { + return fmt.Sprintf(` +resource "google_folder" "foobar" { + parent = "%s" + display_name = "%s" +} + +data "google_folder" "folder" { + folder = "${replace(google_folder.foobar.name, "folders/", "")}" +}`, parent, displayName) +} + +func testAccCheckGoogleFolder_lookupOrganizationConfig(parent string, displayName string) string { + return fmt.Sprintf(` +resource "google_folder" "foobar" { + parent = "%s" + display_name = "%s" +} + +data "google_folder" "folder" { + folder = "${google_folder.foobar.name}" + lookup_organization = true +}`, parent, displayName) +} + +func testAccCheckGoogleFolder_byFullNameNotFoundConfig(name string) string { + return fmt.Sprintf(` +data "google_folder" "folder" { + folder = "%s" +}`, name) +} diff --git a/provider/terraform/tests/data_source_google_iam_policy_test.go b/provider/terraform/tests/data_source_google_iam_policy_test.go new file mode 100644 index 000000000000..71664db3c875 --- /dev/null +++ b/provider/terraform/tests/data_source_google_iam_policy_test.go @@ -0,0 +1 @@ +package google diff --git a/provider/terraform/tests/data_source_google_kms_secret_test.go b/provider/terraform/tests/data_source_google_kms_secret_test.go new file mode 100644 index 000000000000..e0d58f6394aa --- /dev/null 
+++ b/provider/terraform/tests/data_source_google_kms_secret_test.go @@ -0,0 +1,96 @@ +package google + +import ( + "encoding/base64" + "fmt" + "log" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/cloudkms/v1" +) + +func TestAccKmsSecret_basic(t *testing.T) { + t.Parallel() + + projectOrg := getTestOrgFromEnv(t) + projectBillingAccount := getTestBillingAccountFromEnv(t) + + projectId := "terraform-" + acctest.RandString(10) + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + cryptoKeyName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + + plaintext := fmt.Sprintf("secret-%s", acctest.RandString(10)) + + // The first test creates resources needed to encrypt plaintext and produce ciphertext + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleKmsCryptoKey_basic(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName), + Check: func(s *terraform.State) error { + ciphertext, cryptoKeyId, err := testAccEncryptSecretDataWithCryptoKey(s, "google_kms_crypto_key.crypto_key", plaintext) + + if err != nil { + return err + } + + // The second test asserts that the data source has the correct plaintext, given the created ciphertext + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleKmsSecret_datasource(cryptoKeyId.terraformId(), ciphertext), + Check: resource.TestCheckResourceAttr("data.google_kms_secret.acceptance", "plaintext", plaintext), + }, + }, + }) + + return nil + }, + }, + }, + }) +} + +func testAccEncryptSecretDataWithCryptoKey(s *terraform.State, cryptoKeyResourceName, plaintext string) (string, *kmsCryptoKeyId, error) { + config := 
testAccProvider.Meta().(*Config) + + rs, ok := s.RootModule().Resources[cryptoKeyResourceName] + if !ok { + return "", nil, fmt.Errorf("Resource not found: %s", cryptoKeyResourceName) + } + + cryptoKeyId, err := parseKmsCryptoKeyId(rs.Primary.Attributes["id"], config) + + if err != nil { + return "", nil, err + } + + kmsEncryptRequest := &cloudkms.EncryptRequest{ + Plaintext: base64.StdEncoding.EncodeToString([]byte(plaintext)), + } + + encryptResponse, err := config.clientKms.Projects.Locations.KeyRings.CryptoKeys.Encrypt(cryptoKeyId.cryptoKeyId(), kmsEncryptRequest).Do() + + if err != nil { + return "", nil, fmt.Errorf("Error encrypting plaintext: %s", err) + } + + log.Printf("[INFO] Successfully encrypted plaintext and got ciphertext: %s", encryptResponse.Ciphertext) + + return encryptResponse.Ciphertext, cryptoKeyId, nil +} + +func testGoogleKmsSecret_datasource(cryptoKeyTerraformId, ciphertext string) string { + return fmt.Sprintf(` +data "google_kms_secret" "acceptance" { + crypto_key = "%s" + ciphertext = "%s" +} + `, cryptoKeyTerraformId, ciphertext) +} diff --git a/provider/terraform/tests/data_source_google_netblock_ip_ranges_test.go b/provider/terraform/tests/data_source_google_netblock_ip_ranges_test.go new file mode 100644 index 000000000000..b18f2c395c54 --- /dev/null +++ b/provider/terraform/tests/data_source_google_netblock_ip_ranges_test.go @@ -0,0 +1,38 @@ +package google + +import ( + "regexp" + "testing" + + "github.com/hashicorp/terraform/helper/resource" +) + +func TestAccDataSourceGoogleNetblockIpRanges_basic(t *testing.T) { + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccNetblockIpRangesConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.some", + "cidr_blocks.#", regexp.MustCompile(("^[1-9]+[0-9]*$"))), + 
resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.some", + "cidr_blocks.0", regexp.MustCompile("^[0-9./:]+$")), + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.some", + "cidr_blocks_ipv4.#", regexp.MustCompile(("^[1-9]+[0-9]*$"))), + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.some", + "cidr_blocks_ipv4.0", regexp.MustCompile("^[0-9./]+$")), + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.some", + "cidr_blocks_ipv6.#", regexp.MustCompile(("^[1-9]+[0-9]*$"))), + resource.TestMatchResourceAttr("data.google_netblock_ip_ranges.some", + "cidr_blocks_ipv6.0", regexp.MustCompile("^[0-9./:]+$")), + ), + }, + }, + }) +} + +const testAccNetblockIpRangesConfig = ` +data "google_netblock_ip_ranges" "some" {} +` diff --git a/provider/terraform/tests/data_source_google_organization_test.go b/provider/terraform/tests/data_source_google_organization_test.go new file mode 100644 index 000000000000..6c422794d8d9 --- /dev/null +++ b/provider/terraform/tests/data_source_google_organization_test.go @@ -0,0 +1,77 @@ +package google + +import ( + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" +) + +func TestAccDataSourceGoogleOrganization_byFullName(t *testing.T) { + orgId := getTestOrgFromEnv(t) + name := "organizations/" + orgId + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleOrganization_byName(name), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_organization.org", "id", orgId), + resource.TestCheckResourceAttr("data.google_organization.org", "name", name), + ), + }, + }, + }) +} + +func TestAccDataSourceGoogleOrganization_byShortName(t *testing.T) { + orgId := getTestOrgFromEnv(t) + name := "organizations/" + orgId + + resource.Test(t, resource.TestCase{ + PreCheck: 
func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleOrganization_byName(orgId), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_organization.org", "id", orgId), + resource.TestCheckResourceAttr("data.google_organization.org", "name", name), + ), + }, + }, + }) +} + +func TestAccDataSourceGoogleOrganization_byDomain(t *testing.T) { + name := acctest.RandString(16) + ".com" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleOrganization_byDomain(name), + ExpectError: regexp.MustCompile("Organization not found: " + name), + }, + }, + }) +} + +func testAccCheckGoogleOrganization_byName(name string) string { + return fmt.Sprintf(` +data "google_organization" "org" { + organization = "%s" +}`, name) +} + +func testAccCheckGoogleOrganization_byDomain(name string) string { + return fmt.Sprintf(` +data "google_organization" "org" { + domain = "%s" +}`, name) +} diff --git a/provider/terraform/tests/data_source_google_project_services_test.go b/provider/terraform/tests/data_source_google_project_services_test.go new file mode 100644 index 000000000000..de05a9ea7b6d --- /dev/null +++ b/provider/terraform/tests/data_source_google_project_services_test.go @@ -0,0 +1,82 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccDataSourceGoogleProjectServices_basic(t *testing.T) { + t.Parallel() + org := getTestOrgFromEnv(t) + project := "terraform-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleProjectServicesConfig(project, org), + Check: 
resource.ComposeTestCheckFunc( + testAccDataSourceGoogleProjectServicesCheck("data.google_project_services.project_services", "google_project_services.project_services"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleProjectServicesCheck(dataSourceName string, resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[dataSourceName] + if !ok { + return fmt.Errorf("root module has no resource called %s", dataSourceName) + } + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("can't find %s in state", resourceName) + } + + dsAttr := ds.Primary.Attributes + rsAttr := rs.Primary.Attributes + + projectAttrToCheck := []string{ + "project", + "services", + } + + for _, attr := range projectAttrToCheck { + if dsAttr[attr] != rsAttr[attr] { + return fmt.Errorf( + "%s is %s; want %s", + attr, + dsAttr[attr], + rsAttr[attr], + ) + } + } + + return nil + } +} + +func testAccCheckGoogleProjectServicesConfig(project, org string) string { + return fmt.Sprintf(` +resource "google_project" "project" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +resource "google_project_services" "project_services" { + project = "${google_project.project.id}" + services = ["admin.googleapis.com"] +} + +data "google_project_services" "project_services" { + project = "${google_project.project.id}" +}`, project, project, org) +} diff --git a/provider/terraform/tests/data_source_google_project_test.go b/provider/terraform/tests/data_source_google_project_test.go new file mode 100644 index 000000000000..c6982060b992 --- /dev/null +++ b/provider/terraform/tests/data_source_google_project_test.go @@ -0,0 +1,81 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccDataSourceGoogleProject_basic(t *testing.T) { + t.Parallel() + org 
:= getTestOrgFromEnv(t) + project := "terraform-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleProjectConfig(project, org), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceGoogleProjectCheck("data.google_project.project", "google_project.project"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleProjectCheck(dataSourceName string, resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[dataSourceName] + if !ok { + return fmt.Errorf("root module has no resource called %s", dataSourceName) + } + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("can't find %s in state", resourceName) + } + + dsAttr := ds.Primary.Attributes + rsAttr := rs.Primary.Attributes + + projectAttrToCheck := []string{ + "project_id", + "name", + "billing_account", + "org_id", + "folder_id", + "number", + } + + for _, attr := range projectAttrToCheck { + if dsAttr[attr] != rsAttr[attr] { + return fmt.Errorf( + "%s is %s; want %s", + attr, + dsAttr[attr], + rsAttr[attr], + ) + } + } + + return nil + } +} + +func testAccCheckGoogleProjectConfig(project, org string) string { + return fmt.Sprintf(` +resource "google_project" "project" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +data "google_project" "project" { + project_id = "${google_project.project.project_id}" +}`, project, project, org) +} diff --git a/provider/terraform/tests/data_source_google_service_account_key_test.go b/provider/terraform/tests/data_source_google_service_account_key_test.go new file mode 100644 index 000000000000..b9ecb9640d34 --- /dev/null +++ b/provider/terraform/tests/data_source_google_service_account_key_test.go @@ -0,0 +1,132 @@ +package google + +import ( + "fmt" + "regexp" + "testing" + + 
"github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "strings" +) + +func TestAccDatasourceGoogleServiceAccountKey_basic(t *testing.T) { + t.Parallel() + + resourceName := "data.google_service_account_key.acceptance" + account := acctest.RandomWithPrefix("tf-test") + serviceAccountName := fmt.Sprintf( + "projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", + getTestProjectFromEnv(), + account, + getTestProjectFromEnv(), + ) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccDatasourceGoogleServiceAccountKey(account), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleServiceAccountKeyExists(resourceName), + // Check that the 'name' starts with the service account name + resource.TestMatchResourceAttr(resourceName, "name", regexp.MustCompile(serviceAccountName)), + resource.TestCheckResourceAttrSet(resourceName, "key_algorithm"), + resource.TestCheckResourceAttrSet(resourceName, "public_key"), + ), + }, + { + Config: testAccDatasourceGoogleServiceAccountKey_deprecated(account), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleServiceAccountKeyExists(resourceName), + // Check that the 'name' starts with the service account name + resource.TestMatchResourceAttr(resourceName, "name", regexp.MustCompile(serviceAccountName)), + resource.TestCheckResourceAttrSet(resourceName, "key_algorithm"), + resource.TestCheckResourceAttrSet(resourceName, "public_key"), + ), + }, + }, + }) +} + +func TestAccDatasourceGoogleServiceAccountKey_errors(t *testing.T) { + t.Parallel() + + account := acctest.RandomWithPrefix("tf-test") + serviceAccountName := fmt.Sprintf( + "projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", + getTestProjectFromEnv(), + account, + getTestProjectFromEnv(), + ) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: 
testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccDatasourceGoogleServiceAccountKey_error( + account, + `name = "${google_service_account.acceptance.name}"`), + ExpectError: regexp.MustCompile( + fmt.Sprintf("invalid key name %q", serviceAccountName)), + }, + { + Config: testAccDatasourceGoogleServiceAccountKey_error( + account, + `service_account_id = "${google_service_account.acceptance.id}"`), + ExpectError: regexp.MustCompile( + fmt.Sprintf("invalid key name %q", serviceAccountName)), + }, + }, + }) +} + +func testAccDatasourceGoogleServiceAccountKey(account string) string { + return fmt.Sprintf(` +resource "google_service_account" "acceptance" { + account_id = "%s" +} + +resource "google_service_account_key" "acceptance" { + service_account_id = "${google_service_account.acceptance.name}" + public_key_type = "TYPE_X509_PEM_FILE" +} + +data "google_service_account_key" "acceptance" { + name = "${google_service_account_key.acceptance.name}" +}`, account) +} + +func testAccDatasourceGoogleServiceAccountKey_deprecated(account string) string { + return fmt.Sprintf(` +resource "google_service_account" "acceptance" { + account_id = "%s" +} + +resource "google_service_account_key" "acceptance" { + service_account_id = "${google_service_account.acceptance.name}" + public_key_type = "TYPE_X509_PEM_FILE" +} + +data "google_service_account_key" "acceptance" { + service_account_id = "${google_service_account_key.acceptance.name}" +}`, account) +} + +func testAccDatasourceGoogleServiceAccountKey_error(account string, incorrectDataFields ...string) string { + return fmt.Sprintf(` +resource "google_service_account" "acceptance" { + account_id = "%s" +} + +resource "google_service_account_key" "acceptance" { + service_account_id = "${google_service_account.acceptance.name}" + public_key_type = "TYPE_X509_PEM_FILE" +} + +data "google_service_account_key" "acceptance" { +%s +}`, account, strings.Join(incorrectDataFields, "\n\t")) +} diff --git 
a/provider/terraform/tests/data_source_google_service_account_test.go b/provider/terraform/tests/data_source_google_service_account_test.go new file mode 100644 index 000000000000..e1a6ab016142 --- /dev/null +++ b/provider/terraform/tests/data_source_google_service_account_test.go @@ -0,0 +1,47 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" +) + +func TestAccDatasourceGoogleServiceAccount_basic(t *testing.T) { + t.Parallel() + + resourceName := "data.google_service_account.acceptance" + account := acctest.RandomWithPrefix("tf-test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleServiceAccount_basic(account), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + resourceName, "id", fmt.Sprintf("projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", getTestProjectFromEnv(), account, getTestProjectFromEnv())), + resource.TestCheckResourceAttrSet(resourceName, "email"), + resource.TestCheckResourceAttrSet(resourceName, "unique_id"), + resource.TestCheckResourceAttrSet(resourceName, "name"), + resource.TestCheckResourceAttrSet(resourceName, "display_name"), + ), + }, + }, + }) +} + +func testAccCheckGoogleServiceAccount_basic(account string) string { + return fmt.Sprintf(` +resource "google_service_account" "acceptance" { + account_id = "%s" + display_name = "Testing Account" +} + +data "google_service_account" "acceptance" { + account_id = "${google_service_account.acceptance.account_id}" +} +`, account) +} diff --git a/provider/terraform/tests/data_source_google_storage_project_service_account_test.go b/provider/terraform/tests/data_source_google_storage_project_service_account_test.go new file mode 100644 index 000000000000..bedbd73592e5 --- /dev/null +++ 
b/provider/terraform/tests/data_source_google_storage_project_service_account_test.go @@ -0,0 +1,31 @@ +package google + +import ( + "testing" + + "github.com/hashicorp/terraform/helper/resource" +) + +func TestAccDataSourceGoogleStorageProjectServiceAccount_basic(t *testing.T) { + t.Parallel() + + resourceName := "data.google_storage_project_service_account.gcs_account" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleStorageProjectServiceAccount_basic, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, "email_address"), + ), + }, + }, + }) +} + +const testAccCheckGoogleStorageProjectServiceAccount_basic = ` +data "google_storage_project_service_account" "gcs_account" { +} +` diff --git a/provider/terraform/tests/data_source_storage_object_signed_url_test.go b/provider/terraform/tests/data_source_storage_object_signed_url_test.go new file mode 100644 index 000000000000..1cb54fb4124e --- /dev/null +++ b/provider/terraform/tests/data_source_storage_object_signed_url_test.go @@ -0,0 +1,267 @@ +package google + +import ( + "testing" + + "bytes" + "encoding/base64" + "fmt" + "io/ioutil" + "net/http" + "net/url" + + "github.com/hashicorp/go-cleanhttp" + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "golang.org/x/oauth2/google" +) + +const fakeCredentials = `{ + "type": "service_account", + "project_id": "gcp-project", + "private_key_id": "29a54056cee3d6886d9e8515a959af538ab5add9", + "private_key": "-----BEGIN RSA PRIVATE 
KEY-----\nMIIEpAIBAAKCAQEAsGHDAdHZfi81LgVeeMHXYLgNDpcFYhoBykYtTDdNyA5AixID\n8JdKlCmZ6qLNnZrbs4JlBJfmzw6rjUC5bVBFg5NwYVBu3+3Msa4rgLsTGsjPH9rt\nC+QFnFhcmzg3zz8eeXBqJdhw7wmn1Xa9SsC3h6YWveBk98ecyE7yGe8J8xGphjk7\nEQ/KBmRK/EJD0ZwuYW1W4Bv5f5fca7qvi9rCprEmL8//uy0qCwoJj2jU3zc5p72M\npkSZb1XlYxxTEo/h9WCEvWS9pGhy6fJ0sA2RsBHqU4Y5O7MJEei9yu5fVSZUi05f\n/ggfUID+cFEq0Z/A98whKPEBBJ/STdEaqEEkBwIDAQABAoIBAED6EsvF0dihbXbh\ntXbI+h4AT5cTXYFRUV2B0sgkC3xqe65/2YG1Sl0gojoE9bhcxxjvLWWuy/F1Vw93\nS5gQnTsmgpzm86F8yg6euhn3UMdqOJtknDToMITzLFJmOHEZsJFOL1x3ysrUhMan\nsn4qVrIbJn+WfbumBoToSFnzbHflacOh06ZRbYa2bpSPMfGGFtwqQjRadn5+pync\nlCjaupcg209sM0qEk/BDSzHvWL1VgLMdiKBx574TSwS0o569+7vPNt92Ydi7kARo\nreOzkkF4L3xNhKZnmls2eGH6A8cp1KZXoMLFuO+IwvBMA0O29LsUlKJU4PjBrf+7\nwaslnMECgYEA5bJv0L6DKZQD3RCBLue4/mDg0GHZqAhJBS6IcaXeaWeH6PgGZggV\nMGkWnULltJIYFwtaueTfjWqciAeocKx+rqoRjuDMOGgcrEf6Y+b5AqF+IjQM66Ll\nIYPUt3FCIc69z5LNEtyP4DSWsFPJ5UhAoG4QRlDTqT5q0gKHFjeLdeECgYEAxJRk\nkrsWmdmUs5NH9pyhTdEDIc59EuJ8iOqOLzU8xUw6/s2GSClopEFJeeEoIWhLuPY3\nX3bFt4ppl/ksLh05thRs4wXRxqhnokjD3IcGu3l6Gb5QZTYwb0VfN+q2tWVEE8Qc\nPQURheUsM2aP/gpJVQvNsWVmkT0Ijc3J8bR2hucCgYEAjOF4e0ueHu5NwFTTJvWx\nHTRGLwkU+l66ipcT0MCvPW7miRk2s3XZqSuLV0Ekqi/A3sF0D/g0tQPipfwsb48c\n0/wzcLKoDyCsFW7AQG315IswVcIe+peaeYfl++1XZmzrNlkPtrXY+ObIVbXOavZ5\nzOw0xyvj5jYGRnCOci33N4ECgYA91EKx2ABq0YGw3aEj0u31MMlgZ7b1KqFq2wNv\nm7oKgEiJ/hC/P673AsXefNAHeetfOKn/77aOXQ2LTEb2FiEhwNjiquDpL+ywoVxh\nT2LxsmqSEEbvHpUrWlFxn/Rpp3k7ElKjaqWxTHyTii2+BHQ+OKEwq6kQA3deSpy6\n1jz1fwKBgQDLqbdq5FA63PWqApfNVykXukg9MASIcg/0fjADFaHTPDvJjhFutxRP\nppI5Q95P12CQ/eRBZKJnRlkhkL8tfPaWPzzOpCTjID7avRhx2oLmstmYuXx0HluE\ncqXLbAV9WDpIJ3Bpa/S8tWujWhLDmixn2JeAdurWS+naH9U9e4I6Rw==\n-----END RSA PRIVATE KEY-----\n", + "client_email": "user@gcp-project.iam.gserviceaccount.com", + "client_id": "103198861025845558729", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://accounts.google.com/o/oauth2/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + 
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/user%40gcp-project.iam.gserviceaccount.com" +}` + +// The following values are derived from the output of the `gsutil signurl` command. +// i.e. +// gsutil signurl fake_creds.json gs://tf-test-bucket-6159205297736845881/path/to/file +// URL HTTP Method Expiration Signed URL +// gs://tf-test-bucket-6159205297736845881/path/to/file GET 2016-08-12 14:03:30 https://storage.googleapis.com/tf-test-bucket-6159205297736845881/path/to/file?GoogleAccessId=user@gcp-project.iam.gserviceaccount.com&Expires=1470967410&Signature=JJvE2Jc%2BeoagyS1qRACKBGUkgLkKjw7cGymHhtB4IzzN3nbXDqr0acRWGy0%2BEpZ3HYNDalEYsK0lR9Q0WCgty5I0JKmPIuo9hOYa1xTNH%2B22xiWsekxGV%2FcA9FXgWpi%2BFt7fBmMk4dhDe%2BuuYc7N79hd0FYuSBNW1Wp32Bluoe4SNkNAB%2BuIDd9KqPzqs09UAbBoz2y4WxXOQnRyR8GAfb8B%2FDtv62gYjtmp%2F6%2Fyr6xj7byWKZdQt8kEftQLTQmP%2F17Efjp6p%2BXo71Q0F9IhAFiqWfp3Ij8hHDSebLcVb2ULXyHNNQpHBOhFgALrFW3I6Uc3WciLEOsBS9Ej3EGdTg%3D%3D + +const testUrlPath = "/tf-test-bucket-6159205297736845881/path/to/file" +const testUrlExpires = 1470967410 +const testUrlExpectedSignatureBase64Encoded = "JJvE2Jc%2BeoagyS1qRACKBGUkgLkKjw7cGymHhtB4IzzN3nbXDqr0acRWGy0%2BEpZ3HYNDalEYsK0lR9Q0WCgty5I0JKmPIuo9hOYa1xTNH%2B22xiWsekxGV%2FcA9FXgWpi%2BFt7fBmMk4dhDe%2BuuYc7N79hd0FYuSBNW1Wp32Bluoe4SNkNAB%2BuIDd9KqPzqs09UAbBoz2y4WxXOQnRyR8GAfb8B%2FDtv62gYjtmp%2F6%2Fyr6xj7byWKZdQt8kEftQLTQmP%2F17Efjp6p%2BXo71Q0F9IhAFiqWfp3Ij8hHDSebLcVb2ULXyHNNQpHBOhFgALrFW3I6Uc3WciLEOsBS9Ej3EGdTg%3D%3D" +const testUrlExpectedUrl = 
"https://storage.googleapis.com/tf-test-bucket-6159205297736845881/path/to/file?GoogleAccessId=user@gcp-project.iam.gserviceaccount.com&Expires=1470967410&Signature=JJvE2Jc%2BeoagyS1qRACKBGUkgLkKjw7cGymHhtB4IzzN3nbXDqr0acRWGy0%2BEpZ3HYNDalEYsK0lR9Q0WCgty5I0JKmPIuo9hOYa1xTNH%2B22xiWsekxGV%2FcA9FXgWpi%2BFt7fBmMk4dhDe%2BuuYc7N79hd0FYuSBNW1Wp32Bluoe4SNkNAB%2BuIDd9KqPzqs09UAbBoz2y4WxXOQnRyR8GAfb8B%2FDtv62gYjtmp%2F6%2Fyr6xj7byWKZdQt8kEftQLTQmP%2F17Efjp6p%2BXo71Q0F9IhAFiqWfp3Ij8hHDSebLcVb2ULXyHNNQpHBOhFgALrFW3I6Uc3WciLEOsBS9Ej3EGdTg%3D%3D" + +func TestUrlData_Signing(t *testing.T) { + urlData := &UrlData{ + HttpMethod: "GET", + Expires: testUrlExpires, + Path: testUrlPath, + } + // unescape and decode the expected signature + expectedSig, err := url.QueryUnescape(testUrlExpectedSignatureBase64Encoded) + if err != nil { + t.Error(err) + } + expected, err := base64.StdEncoding.DecodeString(expectedSig) + if err != nil { + t.Error(err) + } + + // load fake service account credentials + cfg, err := google.JWTConfigFromJSON([]byte(fakeCredentials), "") + if err != nil { + t.Error(err) + } + + // create url data signature + toSign := urlData.SigningString() + result, err := SignString(toSign, cfg) + if err != nil { + t.Error(err) + } + + // compare to expected value + if !bytes.Equal(result, expected) { + t.Errorf("Signatures do not match:\n%x\n%x\n", expected, result) + } + +} + +func TestUrlData_SignedUrl(t *testing.T) { + // load fake service account credentials + cfg, err := google.JWTConfigFromJSON([]byte(fakeCredentials), "") + if err != nil { + t.Error(err) + } + + urlData := &UrlData{ + HttpMethod: "GET", + Expires: testUrlExpires, + Path: testUrlPath, + JwtConfig: cfg, + } + result, err := urlData.SignedUrl() + if err != nil { + t.Errorf("Could not generated signed url: %+v", err) + } + if result != testUrlExpectedUrl { + t.Errorf("URL does not match expected value:\n%s\n%s", testUrlExpectedUrl, result) + } +} + +func TestAccStorageSignedUrl_basic(t *testing.T) { + 
t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleSignedUrlConfig, + Check: resource.ComposeTestCheckFunc( + testAccSignedUrlExists("data.google_storage_object_signed_url.blerg"), + ), + }, + }, + }) +} + +func TestAccStorageSignedUrl_accTest(t *testing.T) { + t.Parallel() + + bucketName := fmt.Sprintf("tf-test-bucket-%d", acctest.RandInt()) + + headers := map[string]string{ + "x-goog-test": "foo", + "x-goog-if-generation-match": "1", + } + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccTestGoogleStorageObjectSignedURL(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccSignedUrlRetrieval("data.google_storage_object_signed_url.story_url", nil), + testAccSignedUrlRetrieval("data.google_storage_object_signed_url.story_url_w_headers", headers), + testAccSignedUrlRetrieval("data.google_storage_object_signed_url.story_url_w_content_type", nil), + testAccSignedUrlRetrieval("data.google_storage_object_signed_url.story_url_w_md5", nil), + ), + }, + }, + }) +} + +func testAccSignedUrlExists(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + + r := s.RootModule().Resources[n] + a := r.Primary.Attributes + + if a["signed_url"] == "" { + return fmt.Errorf("signed_url is empty: %v", a) + } + + return nil + } +} + +func testAccSignedUrlRetrieval(n string, headers map[string]string) resource.TestCheckFunc { + return func(s *terraform.State) error { + r := s.RootModule().Resources[n] + if r == nil { + return fmt.Errorf("Datasource not found") + } + a := r.Primary.Attributes + + if a["signed_url"] == "" { + return fmt.Errorf("signed_url is empty: %v", a) + } + + // create HTTP request + url := a["signed_url"] + method := a["http_method"] + req, err := 
http.NewRequest(method, url, nil) + if err != nil { + return err + } + + // Add extension headers to request, if provided + for k, v := range headers { + req.Header.Set(k, v) + } + + // content_type is optional, add to test query if provided in datasource config + contentType := a["content_type"] + if contentType != "" { + req.Header.Add("Content-Type", contentType) + } + + // content_md5 is optional, add to test query if provided in datasource config + contentMd5 := a["content_md5"] + if contentMd5 != "" { + req.Header.Add("Content-MD5", contentMd5) + } + + // send request using signed url + client := cleanhttp.DefaultClient() + response, err := client.Do(req) + if err != nil { + return err + } + defer response.Body.Close() + + // check content in response, should be our test string or XML with error + body, err := ioutil.ReadAll(response.Body) + if err != nil { + return err + } + if string(body) != "once upon a time..." { + return fmt.Errorf("Got unexpected object contents: %s\n\tURL: %s", string(body), url) + } + + return nil + } +} + +const testGoogleSignedUrlConfig = ` +data "google_storage_object_signed_url" "blerg" { + bucket = "friedchicken" + path = "path/to/file" + +} +` + +func testAccTestGoogleStorageObjectSignedURL(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "story" { + name = "path/to/file" + bucket = "${google_storage_bucket.bucket.name}" + + content = "once upon a time..." 
+} + +data "google_storage_object_signed_url" "story_url" { + bucket = "${google_storage_bucket.bucket.name}" + path = "${google_storage_bucket_object.story.name}" + +} + +data "google_storage_object_signed_url" "story_url_w_headers" { + bucket = "${google_storage_bucket.bucket.name}" + path = "${google_storage_bucket_object.story.name}" + extension_headers { + x-goog-test = "foo" + x-goog-if-generation-match = 1 + } +} + +data "google_storage_object_signed_url" "story_url_w_content_type" { + bucket = "${google_storage_bucket.bucket.name}" + path = "${google_storage_bucket_object.story.name}" + + content_type = "text/plain" +} + +data "google_storage_object_signed_url" "story_url_w_md5" { + bucket = "${google_storage_bucket.bucket.name}" + path = "${google_storage_bucket_object.story.name}" + + content_md5 = "${google_storage_bucket_object.story.md5hash}" +}`, bucketName) +} diff --git a/provider/terraform/tests/resource_app_engine_application_test.go b/provider/terraform/tests/resource_app_engine_application_test.go new file mode 100644 index 000000000000..010507af1c74 --- /dev/null +++ b/provider/terraform/tests/resource_app_engine_application_test.go @@ -0,0 +1,77 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" +) + +func TestAccAppEngineApplication_basic(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := acctest.RandomWithPrefix("tf-test") + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccAppEngineApplication_basic(pid, org), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_app_engine_application.acceptance", "url_dispatch_rule.#"), + resource.TestCheckResourceAttrSet("google_app_engine_application.acceptance", "name"), + 
resource.TestCheckResourceAttrSet("google_app_engine_application.acceptance", "code_bucket"), + resource.TestCheckResourceAttrSet("google_app_engine_application.acceptance", "default_hostname"), + resource.TestCheckResourceAttrSet("google_app_engine_application.acceptance", "default_bucket"), + ), + }, + { + ResourceName: "google_app_engine_application.acceptance", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccAppEngineApplication_update(pid, org), + }, + { + ResourceName: "google_app_engine_application.acceptance", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccAppEngineApplication_basic(pid, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +resource "google_app_engine_application" "acceptance" { + project = "${google_project.acceptance.project_id}" + auth_domain = "hashicorptest.com" + location_id = "us-central" + serving_status = "SERVING" +}`, pid, pid, org) +} + +func testAccAppEngineApplication_update(pid, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +resource "google_app_engine_application" "acceptance" { + project = "${google_project.acceptance.project_id}" + auth_domain = "tf-test.club" + location_id = "us-central" + serving_status = "USER_DISABLED" +}`, pid, pid, org) +} diff --git a/provider/terraform/tests/resource_bigquery_dataset_test.go b/provider/terraform/tests/resource_bigquery_dataset_test.go new file mode 100644 index 000000000000..9783c2af54a2 --- /dev/null +++ b/provider/terraform/tests/resource_bigquery_dataset_test.go @@ -0,0 +1,219 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccBigQueryDataset_basic(t *testing.T) { + 
t.Parallel() + + datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckBigQueryDatasetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccBigQueryDataset(datasetID), + }, + { + ResourceName: "google_bigquery_dataset.test", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccBigQueryDatasetUpdated(datasetID), + }, + { + ResourceName: "google_bigquery_dataset.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccBigQueryDataset_access(t *testing.T) { + t.Parallel() + + datasetID := fmt.Sprintf("tf_test_access_%s", acctest.RandString(10)) + otherDatasetID := fmt.Sprintf("tf_test_other_%s", acctest.RandString(10)) + otherTableID := fmt.Sprintf("tf_test_other_%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckBigQueryDatasetDestroy, + Steps: []resource.TestStep{ + { + Config: testAccBigQueryDatasetWithOneAccess(datasetID), + }, + { + ResourceName: "google_bigquery_dataset.access_test", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccBigQueryDatasetWithTwoAccess(datasetID), + }, + { + ResourceName: "google_bigquery_dataset.access_test", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccBigQueryDatasetWithOneAccess(datasetID), + }, + { + ResourceName: "google_bigquery_dataset.access_test", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccBigQueryDatasetWithViewAccess(datasetID, otherDatasetID, otherTableID), + }, + { + ResourceName: "google_bigquery_dataset.access_test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckBigQueryDatasetDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := 
range s.RootModule().Resources { + if rs.Type != "google_bigquery_dataset" { + continue + } + + _, err := config.clientBigQuery.Datasets.Get(config.Project, rs.Primary.Attributes["dataset_id"]).Do() + if err == nil { + return fmt.Errorf("Dataset still exists") + } + } + + return nil +} + +func testAccBigQueryDataset(datasetID string) string { + return fmt.Sprintf(` +resource "google_bigquery_dataset" "test" { + dataset_id = "%s" + friendly_name = "foo" + description = "This is a foo description" + location = "EU" + default_table_expiration_ms = 3600000 + + labels { + env = "foo" + default_table_expiration_ms = 3600000 + } +}`, datasetID) +} + +func testAccBigQueryDatasetUpdated(datasetID string) string { + return fmt.Sprintf(` +resource "google_bigquery_dataset" "test" { + dataset_id = "%s" + friendly_name = "bar" + description = "This is a bar description" + location = "EU" + default_table_expiration_ms = 7200000 + + labels { + env = "bar" + default_table_expiration_ms = 7200000 + } +}`, datasetID) +} + +func testAccBigQueryDatasetWithOneAccess(datasetID string) string { + return fmt.Sprintf(` +resource "google_bigquery_dataset" "access_test" { + dataset_id = "%s" + + access { + role = "OWNER" + user_by_email = "Joe@example.com" + } + + labels { + env = "foo" + default_table_expiration_ms = 3600000 + } +}`, datasetID) +} + +func testAccBigQueryDatasetWithTwoAccess(datasetID string) string { + return fmt.Sprintf(` +resource "google_bigquery_dataset" "access_test" { + dataset_id = "%s" + + access { + role = "OWNER" + user_by_email = "Joe@example.com" + } + access { + role = "READER" + domain = "example.com" + } + + labels { + env = "foo" + default_table_expiration_ms = 3600000 + } +}`, datasetID) +} + +func testAccBigQueryDatasetWithViewAccess(datasetID, otherDatasetID, otherTableID string) string { + // Note that we have to add a non-view access to prevent BQ from creating 4 default + // access entries. 
+ return fmt.Sprintf(` +resource "google_bigquery_dataset" "other_dataset" { + dataset_id = "%s" +} + +resource "google_bigquery_table" "table_with_view" { + table_id = "%s" + dataset_id = "${google_bigquery_dataset.other_dataset.dataset_id}" + + time_partitioning { + type = "DAY" + } + + view { + query = "SELECT state FROM [lookerdata:cdc.project_tycho_reports]" + use_legacy_sql = true + } +} + +resource "google_bigquery_dataset" "access_test" { + dataset_id = "%s" + + access { + role = "OWNER" + user_by_email = "Joe@example.com" + } + access { + view { + project_id = "${google_bigquery_dataset.other_dataset.project}" + dataset_id = "${google_bigquery_dataset.other_dataset.dataset_id}" + table_id = "${google_bigquery_table.table_with_view.table_id}" + } + } + + labels { + env = "foo" + default_table_expiration_ms = 3600000 + } +}`, otherDatasetID, otherTableID, datasetID) +} diff --git a/provider/terraform/tests/resource_bigquery_table_test.go b/provider/terraform/tests/resource_bigquery_table_test.go new file mode 100644 index 000000000000..7f63543b36cc --- /dev/null +++ b/provider/terraform/tests/resource_bigquery_table_test.go @@ -0,0 +1,258 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccBigQueryTable_Basic(t *testing.T) { + t.Parallel() + + datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(10)) + tableID := fmt.Sprintf("tf_test_%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckBigQueryTableDestroy, + Steps: []resource.TestStep{ + { + Config: testAccBigQueryTable(datasetID, tableID), + }, + { + ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccBigQueryTableUpdated(datasetID, tableID), + }, + { + 
ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccBigQueryTable_View(t *testing.T) { + t.Parallel() + + datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(10)) + tableID := fmt.Sprintf("tf_test_%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckBigQueryTableDestroy, + Steps: []resource.TestStep{ + { + Config: testAccBigQueryTableWithView(datasetID, tableID), + }, + { + ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccBigQueryTable_ViewWithLegacySQL(t *testing.T) { + t.Parallel() + + datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(10)) + tableID := fmt.Sprintf("tf_test_%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckBigQueryTableDestroy, + Steps: []resource.TestStep{ + { + Config: testAccBigQueryTableWithView(datasetID, tableID), + }, + { + ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccBigQueryTableWithNewSqlView(datasetID, tableID), + }, + { + ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckBigQueryTableDestroy(s *terraform.State) error { + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_bigquery_table" { + continue + } + + config := testAccProvider.Meta().(*Config) + _, err := config.clientBigQuery.Tables.Get(config.Project, rs.Primary.Attributes["dataset_id"], rs.Primary.Attributes["table_id"]).Do() + if err == nil { + return fmt.Errorf("Table still present") + } + } + + return nil +} + +func testAccBigQueryTable(datasetID, tableID string) string { + return fmt.Sprintf(` +resource 
"google_bigquery_dataset" "test" { + dataset_id = "%s" +} + +resource "google_bigquery_table" "test" { + table_id = "%s" + dataset_id = "${google_bigquery_dataset.test.dataset_id}" + + time_partitioning { + type = "DAY" + field = "ts" + } + + schema = < package google import ( @@ -260,7 +261,14 @@ resource "google_compute_target_pool" "foobar" { resource "google_compute_instance_group_manager" "foobar" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.foobar.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.foobar.self_link}" + name = "primary" + } +<% end -%> target_pools = ["${google_compute_target_pool.foobar.self_link}"] base_instance_name = "foobar" zone = "us-central1-a" diff --git a/provider/terraform/tests/resource_compute_backend_service_test.go b/provider/terraform/tests/resource_compute_backend_service_test.go.erb similarity index 97% rename from provider/terraform/tests/resource_compute_backend_service_test.go rename to provider/terraform/tests/resource_compute_backend_service_test.go.erb index 19d8116a6ad5..d9c79fafeab2 100644 --- a/provider/terraform/tests/resource_compute_backend_service_test.go +++ b/provider/terraform/tests/resource_compute_backend_service_test.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -699,7 +700,14 @@ resource "google_compute_backend_service" "lipsum" { resource "google_compute_instance_group_manager" "foobar" { name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.foobar.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.foobar.self_link}" + name = "primary" + } +<% end -%> base_instance_name = "foobar" zone = "us-central1-f" target_size = 1 @@ -762,7 +770,14 @@ resource "google_compute_backend_service" "lipsum" { resource "google_compute_instance_group_manager" "foobar" { name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.foobar.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.foobar.self_link}" + name = "primary" + } +<% end -%> base_instance_name = "foobar" zone = "us-central1-f" target_size = 1 @@ -915,7 +930,14 @@ resource "google_compute_backend_service" "lipsum" { resource "google_compute_instance_group_manager" "foobar" { name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.foobar.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.foobar.self_link}" + name = "primary" + } +<% end -%> base_instance_name = "foobar" zone = "us-central1-f" target_size = 1 @@ -973,7 +995,14 @@ resource "google_compute_backend_service" "lipsum" { resource "google_compute_instance_group_manager" "foobar" { name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.foobar.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.foobar.self_link}" + name = "primary" + } +<% end -%> base_instance_name = "foobar" zone = "us-central1-f" target_size = 1 diff --git a/provider/terraform/tests/resource_compute_disk_test.go.erb b/provider/terraform/tests/resource_compute_disk_test.go.erb new file mode 100644 index 000000000000..d03cfc85423a --- /dev/null +++ b/provider/terraform/tests/resource_compute_disk_test.go.erb @@ -0,0 +1,899 @@ +<% autogen_exception -%> +package google + +import ( + "fmt" + "os" + "regexp" + "strconv" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/compute/v1" +) + +func TestDiskImageDiffSuppress(t *testing.T) { + cases := map[string]struct { + Old, New string + ExpectDiffSuppress bool + }{ + // Full & partial links + "matching self_link with different api version": { + Old: "https://www.googleapis.com/compute/beta/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + ExpectDiffSuppress: true, + }, + "matching image partial self_link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "projects/debian-cloud/global/images/debian-8-jessie-v20171213", + ExpectDiffSuppress: true, + }, + "matching image partial no project self_link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "global/images/debian-8-jessie-v20171213", + ExpectDiffSuppress: true, + }, + "different image self_link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: 
"https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-7-jessie-v20171213", + ExpectDiffSuppress: false, + }, + "different image partial self_link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "projects/debian-cloud/global/images/debian-7-jessie-v20171213", + ExpectDiffSuppress: false, + }, + "different image partial no project self_link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "global/images/debian-7-jessie-v20171213", + ExpectDiffSuppress: false, + }, + // Image name + "matching image name": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "debian-8-jessie-v20171213", + ExpectDiffSuppress: true, + }, + "different image name": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "debian-7-jessie-v20171213", + ExpectDiffSuppress: false, + }, + // Image short hand + "matching image short hand": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "debian-cloud/debian-8-jessie-v20171213", + ExpectDiffSuppress: true, + }, + "matching image short hand but different project": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "different-cloud/debian-8-jessie-v20171213", + ExpectDiffSuppress: false, + }, + "different image short hand": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "debian-cloud/debian-7-jessie-v20171213", + ExpectDiffSuppress: false, + }, + // Image Family + "matching image family": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "family/debian-8", + ExpectDiffSuppress: 
true, + }, + "matching image family self link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/family/debian-8", + ExpectDiffSuppress: true, + }, + "matching unconventional image family self link": { + Old: "https://www.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/ubuntu-1404-trusty-v20180122", + New: "https://www.googleapis.com/compute/v1/projects/projects/ubuntu-os-cloud/global/images/family/ubuntu-1404-lts", + ExpectDiffSuppress: true, + }, + "matching image family partial self link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "projects/debian-cloud/global/images/family/debian-8", + ExpectDiffSuppress: true, + }, + "matching unconventional image family partial self link": { + Old: "https://www.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/ubuntu-1404-trusty-v20180122", + New: "projects/ubuntu-os-cloud/global/images/family/ubuntu-1404-lts", + ExpectDiffSuppress: true, + }, + "matching image family partial no project self link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "global/images/family/debian-8", + ExpectDiffSuppress: true, + }, + "matching image family short hand": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "debian-cloud/debian-8", + ExpectDiffSuppress: true, + }, + "matching image family short hand with project short name": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "debian/debian-8", + ExpectDiffSuppress: true, + }, + "matching unconventional image family short hand": { + Old: 
"https://www.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/ubuntu-1404-trusty-v20180122", + New: "ubuntu-os-cloud/ubuntu-1404-lts", + ExpectDiffSuppress: true, + }, + "matching unconventional image family - minimal": { + Old: "https://www.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/ubuntu-minimal-1804-bionic-v20180705", + New: "ubuntu-minimal-1804-lts", + ExpectDiffSuppress: true, + }, + "different image family": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "family/debian-7", + ExpectDiffSuppress: false, + }, + "different image family self link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/family/debian-7", + ExpectDiffSuppress: false, + }, + "different image family partial self link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "projects/debian-cloud/global/images/family/debian-7", + ExpectDiffSuppress: false, + }, + "different image family partial no project self link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "global/images/family/debian-7", + ExpectDiffSuppress: false, + }, + "matching image family but different project in self link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "https://www.googleapis.com/compute/v1/projects/other-cloud/global/images/family/debian-8", + ExpectDiffSuppress: false, + }, + "different image family but different project in partial self link": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "projects/other-cloud/global/images/family/debian-8", + ExpectDiffSuppress: false, + }, + 
"different image family short hand": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "debian-cloud/debian-7", + ExpectDiffSuppress: false, + }, + "matching image family shorthand but different project": { + Old: "https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/debian-8-jessie-v20171213", + New: "different-cloud/debian-8", + ExpectDiffSuppress: false, + }, + } + + for tn, tc := range cases { + if diskImageDiffSuppress("image", tc.Old, tc.New, nil) != tc.ExpectDiffSuppress { + t.Errorf("bad: %s, %q => %q expect DiffSuppress to return %t", tn, tc.Old, tc.New, tc.ExpectDiffSuppress) + } + } +} + +// Test that all the naming pattern for public images are supported. +func TestAccComputeDisk_imageDiffSuppressPublicVendorsFamilyNames(t *testing.T) { + t.Parallel() + + if os.Getenv(resource.TestEnvVar) == "" { + t.Skip(fmt.Sprintf("Network access not allowed; use %s=1 to enable", resource.TestEnvVar)) + } + + config := getInitializedConfig(t) + + for _, publicImageProject := range imageMap { + token := "" + for paginate := true; paginate; { + resp, err := config.clientCompute.Images.List(publicImageProject).Filter("deprecated.replacement ne .*images.*").PageToken(token).Do() + if err != nil { + t.Fatalf("Can't list public images for project %q", publicImageProject) + } + + for _, image := range resp.Items { + if !diskImageDiffSuppress("image", image.SelfLink, "family/"+image.Family, nil) { + t.Errorf("should suppress diff for image %q and family %q", image.SelfLink, image.Family) + } + } + token := resp.NextPageToken + paginate = token != "" + } + } +} + +func TestAccComputeDisk_basic(t *testing.T) { + t.Parallel() + + diskName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + var disk compute.Disk + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckComputeDiskDestroy, + Steps: 
[]resource.TestStep{ + resource.TestStep{ + Config: testAccComputeDisk_basic(diskName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.foobar", getTestProjectFromEnv(), &disk), + testAccCheckComputeDiskHasLabel(&disk, "my-label", "my-label-value"), + testAccCheckComputeDiskHasLabelFingerprint(&disk, "google_compute_disk.foobar"), + ), + }, + resource.TestStep{ + ResourceName: "google_compute_disk.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccComputeDisk_timeout(t *testing.T) { + t.Parallel() + + diskName := acctest.RandomWithPrefix("tf-test-disk") + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccComputeDisk_timeout(diskName), + ExpectError: regexp.MustCompile("timeout"), + }, + }, + }) +} + +func TestAccComputeDisk_update(t *testing.T) { + t.Parallel() + + diskName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + var disk compute.Disk + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccComputeDisk_basic(diskName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.foobar", getTestProjectFromEnv(), &disk), + resource.TestCheckResourceAttr("google_compute_disk.foobar", "size", "50"), + testAccCheckComputeDiskHasLabel(&disk, "my-label", "my-label-value"), + testAccCheckComputeDiskHasLabelFingerprint(&disk, "google_compute_disk.foobar"), + ), + }, + { + Config: testAccComputeDisk_updated(diskName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.foobar", getTestProjectFromEnv(), &disk), + resource.TestCheckResourceAttr("google_compute_disk.foobar", "size", "100"), + testAccCheckComputeDiskHasLabel(&disk, "my-label", 
"my-updated-label-value"), + testAccCheckComputeDiskHasLabel(&disk, "a-new-label", "a-new-label-value"), + testAccCheckComputeDiskHasLabelFingerprint(&disk, "google_compute_disk.foobar"), + ), + }, + }, + }) +} + +func TestAccComputeDisk_fromSnapshot(t *testing.T) { + t.Parallel() + + diskName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + firstDiskName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + snapshotName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + projectName := getTestProjectFromEnv() + + var disk compute.Disk + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckComputeDiskDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccComputeDisk_fromSnapshot(projectName, firstDiskName, snapshotName, diskName, "self_link"), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.seconddisk", getTestProjectFromEnv(), &disk), + ), + }, + resource.TestStep{ + Config: testAccComputeDisk_fromSnapshot(projectName, firstDiskName, snapshotName, diskName, "name"), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.seconddisk", getTestProjectFromEnv(), &disk), + ), + }, + }, + }) +} + +func TestAccComputeDisk_encryption(t *testing.T) { + t.Parallel() + + diskName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + var disk compute.Disk + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckComputeDiskDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccComputeDisk_encryption(diskName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.foobar", getTestProjectFromEnv(), &disk), + testAccCheckEncryptionKey( + "google_compute_disk.foobar", &disk), + ), + }, + }, + }) +} + +<% unless version.nil? 
|| version == 'beta' -%> +func TestAccComputeDisk_encryptionKMS(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "tf-test-" + acctest.RandString(10) + billingAccount := getTestBillingAccountFromEnv(t) + diskName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + keyName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + importID := fmt.Sprintf("%s/%s/%s", pid, "us-central1-a", diskName) + var disk compute.Disk + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckComputeDiskDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccComputeDisk_encryptionKMS(pid, pname, org, billingAccount, diskName, keyRingName, keyName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.foobar", pid, &disk), + testAccCheckEncryptionKey( + "google_compute_disk.foobar", &disk), + ), + }, + resource.TestStep{ + ResourceName: "google_compute_disk.foobar", + ImportStateId: importID, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +<% end -%> + +func TestAccComputeDisk_deleteDetach(t *testing.T) { + t.Parallel() + + diskName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + var disk compute.Disk + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckComputeDiskDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccComputeDisk_deleteDetach(instanceName, diskName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.foo", getTestProjectFromEnv(), &disk), + ), + }, + // this needs to be a second step so we refresh and see the instance + // listed as attached to the disk; the instance is created after the 
+ // disk. and the disk's properties aren't refreshed unless there's + // another step + resource.TestStep{ + Config: testAccComputeDisk_deleteDetach(instanceName, diskName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.foo", getTestProjectFromEnv(), &disk), + testAccCheckComputeDiskInstances( + "google_compute_disk.foo", &disk), + ), + }, + }, + }) +} + +func TestAccComputeDisk_deleteDetachIGM(t *testing.T) { + t.Parallel() + + diskName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + diskName2 := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + mgrName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + var disk compute.Disk + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckComputeDiskDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccComputeDisk_deleteDetachIGM(diskName, mgrName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.foo", getTestProjectFromEnv(), &disk), + ), + }, + // this needs to be a second step so we refresh and see the instance + // listed as attached to the disk; the instance is created after the + // disk. 
and the disk's properties aren't refreshed unless there's + // another step + resource.TestStep{ + Config: testAccComputeDisk_deleteDetachIGM(diskName, mgrName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.foo", getTestProjectFromEnv(), &disk), + testAccCheckComputeDiskInstances( + "google_compute_disk.foo", &disk), + ), + }, + // Change the disk name to recreate the instances + resource.TestStep{ + Config: testAccComputeDisk_deleteDetachIGM(diskName2, mgrName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.foo", getTestProjectFromEnv(), &disk), + ), + }, + // Add the extra step like before + resource.TestStep{ + Config: testAccComputeDisk_deleteDetachIGM(diskName2, mgrName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeDiskExists( + "google_compute_disk.foo", getTestProjectFromEnv(), &disk), + testAccCheckComputeDiskInstances( + "google_compute_disk.foo", &disk), + ), + }, + }, + }) +} + +func TestAccComputeDisk_computeDiskUserRegex(t *testing.T) { + + shouldPass := []string{ + + "https://www.googleapis.com/compute/v1/projects/project-id/zones/us-central1/instances/123", + "https://www.googleapis.com/compute/v1/projects/123123/zones/us-central1/instances/123", + "https://www.googleapis.com/compute/v1/projects/hashicorptest.net:project-123/zones/us-central1/instances/123", + "https://www.googleapis.com/compute/v1/projects/123/zones/456/instances/789", + } + + shouldFail := []string{ + "https://www.googleapis.com/compute/v1/projects/project#/zones/us-central1/instances/123", + "https://www.googleapis.com/compute/v1/projects/project/zones/us-central#/instances/123", + "https://www.googleapis.com/compute/v1/projects/project/zones/us-central1/instances/?", + "https://www.googleapis.com/compute/v1/projects/foo.com:bar:baz/zones/us-central1/instances/?", + "https://www.googleapis.com/compute/v1/projects/foo.com:/zones/us-central1/instances/?", + } + + for 
_, element := range shouldPass { + if !computeDiskUserRegex.MatchString(element) { + t.Error("computeDiskUserRegex should match on '" + element + "' but doesn't") + } + } + + for _, element := range shouldFail { + if computeDiskUserRegex.MatchString(element) { + t.Error("computeDiskUserRegex shouldn't match on '" + element + "' but does") + } + } + +} + +func testAccCheckComputeDiskDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_compute_disk" { + continue + } + + _, err := config.clientCompute.Disks.Get( + config.Project, rs.Primary.Attributes["zone"], rs.Primary.ID).Do() + if err == nil { + return fmt.Errorf("Disk still exists") + } + } + + return nil +} + +func testAccCheckComputeDiskExists(n, p string, disk *compute.Disk) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + + found, err := config.clientCompute.Disks.Get( + p, rs.Primary.Attributes["zone"], rs.Primary.ID).Do() + if err != nil { + return err + } + + if found.Name != rs.Primary.ID { + return fmt.Errorf("Disk not found") + } + + *disk = *found + + return nil + } +} + +func testAccCheckComputeDiskHasLabel(disk *compute.Disk, key, value string) resource.TestCheckFunc { + return func(s *terraform.State) error { + val, ok := disk.Labels[key] + if !ok { + return fmt.Errorf("Label with key %s not found", key) + } + + if val != value { + return fmt.Errorf("Label value did not match for key %s: expected %s but found %s", key, value, val) + } + return nil + } +} + +func testAccCheckComputeDiskHasLabelFingerprint(disk *compute.Disk, resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + state := s.RootModule().Resources[resourceName] + if state == 
nil { + return fmt.Errorf("Unable to find resource named %s", resourceName) + } + + labelFingerprint := state.Primary.Attributes["label_fingerprint"] + if labelFingerprint != disk.LabelFingerprint { + return fmt.Errorf("Label fingerprints do not match: api returned %s but state has %s", + disk.LabelFingerprint, labelFingerprint) + } + + return nil + } +} + +func testAccCheckEncryptionKey(n string, disk *compute.Disk) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + attr := rs.Primary.Attributes["disk_encryption_key.0.sha256"] + if disk.DiskEncryptionKey == nil { + return fmt.Errorf("Disk %s has mismatched encryption key.\nTF State: %+v\nGCP State: ", n, attr) + } else if attr != disk.DiskEncryptionKey.Sha256 { + return fmt.Errorf("Disk %s has mismatched encryption key.\nTF State: %+v.\nGCP State: %+v", + n, attr, disk.DiskEncryptionKey.Sha256) + } + return nil + } +} + +func testAccCheckComputeDiskInstances(n string, disk *compute.Disk) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + attr := rs.Primary.Attributes["users.#"] + if strconv.Itoa(len(disk.Users)) != attr { + return fmt.Errorf("Disk %s has mismatched users.\nTF State: %+v\nGCP State: %+v", n, rs.Primary.Attributes["users"], disk.Users) + } + + for pos, user := range disk.Users { + if rs.Primary.Attributes["users."+strconv.Itoa(pos)] != user { + return fmt.Errorf("Disk %s has mismatched users.\nTF State: %+v.\nGCP State: %+v", + n, rs.Primary.Attributes["users"], disk.Users) + } + } + return nil + } +} + +func testAccComputeDisk_basic(diskName string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-9" + project = "debian-cloud" +} + +resource "google_compute_disk" "foobar" { + name = "%s" + image = 
"${data.google_compute_image.my_image.self_link}" + size = 50 + type = "pd-ssd" + zone = "us-central1-a" + labels { + my-label = "my-label-value" + } +}`, diskName) +} + +func testAccComputeDisk_timeout(diskName string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-9" + project = "debian-cloud" +} + +resource "google_compute_disk" "foobar" { + name = "%s" + image = "${data.google_compute_image.my_image.self_link}" + type = "pd-ssd" + zone = "us-central1-a" + + timeouts { + create = "1s" + } +}`, diskName) +} + +func testAccComputeDisk_updated(diskName string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-9" + project = "debian-cloud" +} + +resource "google_compute_disk" "foobar" { + name = "%s" + image = "${data.google_compute_image.my_image.self_link}" + size = 100 + type = "pd-ssd" + zone = "us-central1-a" + labels { + my-label = "my-updated-label-value" + a-new-label = "a-new-label-value" + } +}`, diskName) +} + +func testAccComputeDisk_fromSnapshot(projectName, firstDiskName, snapshotName, diskName, ref_selector string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-9" + project = "debian-cloud" +} + +resource "google_compute_disk" "foobar" { + name = "d1-%s" + image = "${data.google_compute_image.my_image.self_link}" + size = 50 + type = "pd-ssd" + zone = "us-central1-a" + project = "%s" +} + +resource "google_compute_snapshot" "snapdisk" { + name = "%s" + source_disk = "${google_compute_disk.foobar.name}" + zone = "us-central1-a" + project = "%s" +} + +resource "google_compute_disk" "seconddisk" { + name = "d2-%s" + snapshot = "${google_compute_snapshot.snapdisk.%s}" + type = "pd-ssd" + zone = "us-central1-a" + project = "%s" +}`, firstDiskName, projectName, snapshotName, projectName, diskName, ref_selector, projectName) +} + +func testAccComputeDisk_encryption(diskName string) string { + return fmt.Sprintf(` +data 
"google_compute_image" "my_image" { + family = "debian-9" + project = "debian-cloud" +} + +resource "google_compute_disk" "foobar" { + name = "%s" + image = "${data.google_compute_image.my_image.self_link}" + size = 50 + type = "pd-ssd" + zone = "us-central1-a" + disk_encryption_key { + raw_key = "SGVsbG8gZnJvbSBHb29nbGUgQ2xvdWQgUGxhdGZvcm0=" + } +}`, diskName) +} + +func testAccComputeDisk_encryptionKMS(pid, pname, org, billing, diskName, keyRingName, keyName string) string { + return fmt.Sprintf(` +resource "google_project" "project" { + project_id = "%s" + name = "%s" + org_id = "%s" + billing_account = "%s" +} + +data "google_compute_image" "my_image" { + family = "debian-9" + project = "debian-cloud" +} + +resource "google_project_services" "apis" { + project = "${google_project.project.project_id}" + + services = [ + "oslogin.googleapis.com", + "compute.googleapis.com", + "cloudkms.googleapis.com", + "appengine.googleapis.com", + ] +} + +resource "google_project_iam_member" "kms-project-binding" { + project = "${google_project.project.project_id}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${google_project.project.number}@compute-system.iam.gserviceaccount.com" + + depends_on = ["google_project_services.apis"] +} + +resource "google_kms_crypto_key_iam_binding" "kms-key-binding" { + crypto_key_id = "${google_kms_crypto_key.my_crypto_key.self_link}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + + members = [ + "serviceAccount:service-${google_project.project.number}@compute-system.iam.gserviceaccount.com", + ] + + depends_on = ["google_project_services.apis"] +} + +resource "google_kms_key_ring" "my_key_ring" { + name = "%s" + project = "${google_project.project.project_id}" + location = "us-central1" + + depends_on = ["google_project_services.apis"] +} + +resource "google_kms_crypto_key" "my_crypto_key" { + name = "%s" + key_ring = "${google_kms_key_ring.my_key_ring.self_link}" +} + +resource 
"google_compute_disk" "foobar" { + name = "%s" + image = "${data.google_compute_image.my_image.self_link}" + size = 10 + type = "pd-ssd" + zone = "us-central1-a" + project = "${google_project.project.project_id}" + + disk_encryption_key { + kms_key_self_link = "${google_kms_crypto_key.my_crypto_key.self_link}" + } + + depends_on = [ + "google_kms_crypto_key_iam_binding.kms-key-binding", + "google_project_iam_member.kms-project-binding", + ] +} +`, pid, pname, org, billing, keyRingName, keyName, diskName) +} + +func testAccComputeDisk_deleteDetach(instanceName, diskName string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-9" + project = "debian-cloud" +} + +resource "google_compute_disk" "foo" { + name = "%s" + image = "${data.google_compute_image.my_image.self_link}" + size = 50 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_instance" "bar" { + name = "%s" + machine_type = "n1-standard-1" + zone = "us-central1-a" + + boot_disk { + initialize_params { + image = "${data.google_compute_image.my_image.self_link}" + } + } + + attached_disk { + source = "${google_compute_disk.foo.self_link}" + } + + network_interface { + network = "default" + } +}`, diskName, instanceName) +} + +func testAccComputeDisk_deleteDetachIGM(diskName, mgrName string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-9" + project = "debian-cloud" +} + +resource "google_compute_disk" "foo" { + name = "%s" + image = "${data.google_compute_image.my_image.self_link}" + size = 50 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_instance_template" "template" { + machine_type = "g1-small" + + disk { + boot = true + source = "${google_compute_disk.foo.name}" + auto_delete = false + } + + network_interface { + network = "default" + } + + lifecycle { + create_before_destroy = true + } +} + +resource "google_compute_instance_group_manager" "manager" { + name = "%s" 
+ base_instance_name = "disk-igm" +<% if version.nil? || version == 'ga' -%> + instance_template = "${google_compute_instance_template.template.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.template.self_link}" + name = "primary" + } + update_policy { + minimal_action = "RESTART" + type = "PROACTIVE" + max_unavailable_fixed = 1 + } +<% end -%> + zone = "us-central1-a" + target_size = 1 +}`, diskName, mgrName) +} diff --git a/provider/terraform/tests/resource_compute_instance_group_manager_test.go b/provider/terraform/tests/resource_compute_instance_group_manager_test.go.erb similarity index 77% rename from provider/terraform/tests/resource_compute_instance_group_manager_test.go rename to provider/terraform/tests/resource_compute_instance_group_manager_test.go.erb index 5326f18622d8..8ab73042783f 100644 --- a/provider/terraform/tests/resource_compute_instance_group_manager_test.go +++ b/provider/terraform/tests/resource_compute_instance_group_manager_test.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -128,6 +129,7 @@ func TestAccInstanceGroupManager_updateLifecycle(t *testing.T) { }) } +<% if version.nil? || version == 'ga' -%> func TestAccInstanceGroupManager_updateStrategy(t *testing.T) { t.Parallel() @@ -149,6 +151,7 @@ func TestAccInstanceGroupManager_updateStrategy(t *testing.T) { }, }) } +<% end -%> func TestAccInstanceGroupManager_rollingUpdatePolicy(t *testing.T) { t.Parallel() @@ -160,14 +163,45 @@ func TestAccInstanceGroupManager_rollingUpdatePolicy(t *testing.T) { Providers: testAccProviders, CheckDestroy: testAccCheckInstanceGroupManagerDestroy, Steps: []resource.TestStep{ - resource.TestStep{ + { Config: testAccInstanceGroupManager_rollingUpdatePolicy(igm), }, +<% if version.nil? || version == 'ga' -%> // No import step because rolling updates are broken and the field will be removed in 2.0.0. // TODO(danawillow): Remove this test once we've removed the field. 
- resource.TestStep{ +<% else -%> + { + ResourceName: "google_compute_instance_group_manager.igm-rolling-update-policy", + ImportState: true, + ImportStateVerify: true, + }, +<% end -%> + { Config: testAccInstanceGroupManager_rollingUpdatePolicy2(igm), }, +<% unless version.nil? || version == 'ga' -%> + { + ResourceName: "google_compute_instance_group_manager.igm-rolling-update-policy", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccInstanceGroupManager_rollingUpdatePolicy3(igm), + }, + { + ResourceName: "google_compute_instance_group_manager.igm-rolling-update-policy", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccInstanceGroupManager_rollingUpdatePolicy4(igm), + }, + { + ResourceName: "google_compute_instance_group_manager.igm-rolling-update-policy", + ImportState: true, + ImportStateVerify: true, + }, +<% end -%> }, }) } @@ -245,6 +279,16 @@ func TestAccInstanceGroupManager_autoHealingPolicies(t *testing.T) { ImportState: true, ImportStateVerify: true, }, +<% unless version.nil? || version == 'ga' -%> + { + Config: testAccInstanceGroupManager_autoHealingPoliciesRemoved(template, target, igm, hck), + }, + { + ResourceName: "google_compute_instance_group_manager.igm-basic", + ImportState: true, + ImportStateVerify: true, + }, +<% end -%> }, }) } @@ -347,7 +391,14 @@ func testAccInstanceGroupManager_basic(template, target, igm1, igm2 string) stri resource "google_compute_instance_group_manager" "igm-basic" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + name = "prod" + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + } +<% end -%> target_pools = ["${google_compute_target_pool.igm-basic.self_link}"] base_instance_name = "igm-basic" zone = "us-central1-c" @@ -357,7 +408,14 @@ func testAccInstanceGroupManager_basic(template, target, igm1, igm2 string) stri resource "google_compute_instance_group_manager" "igm-no-tp" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + name = "prod" + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + } +<% end -%> base_instance_name = "igm-no-tp" zone = "us-central1-c" target_size = 2 @@ -400,7 +458,14 @@ func testAccInstanceGroupManager_targetSizeZero(template, igm string) string { resource "google_compute_instance_group_manager" "igm-basic" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + name = "prod" + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + } +<% end -%> base_instance_name = "igm-basic" zone = "us-central1-c" } @@ -448,7 +513,14 @@ func testAccInstanceGroupManager_update(template, target, igm string) string { resource "google_compute_instance_group_manager" "igm-update" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-update.self_link}" +<% else -%> + version { + name = "prod" + instance_template = "${google_compute_instance_template.igm-update.self_link}" + } +<% end -%> target_pools = ["${google_compute_target_pool.igm-update.self_link}"] base_instance_name = "igm-update" zone = "us-central1-c" @@ -533,7 +605,14 @@ func testAccInstanceGroupManager_update2(template1, target1, target2, template2, resource "google_compute_instance_group_manager" "igm-update" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-update2.self_link}" +<% else -%> + version { + name = "prod" + instance_template = "${google_compute_instance_template.igm-update2.self_link}" + } +<% end -%> target_pools = [ "${google_compute_target_pool.igm-update.self_link}", "${google_compute_target_pool.igm-update2.self_link}", @@ -586,7 +665,14 @@ func testAccInstanceGroupManager_updateLifecycle(tag, igm string) string { resource "google_compute_instance_group_manager" "igm-update" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-update.self_link}" +<% else -%> + version { + name = "prod" + instance_template = "${google_compute_instance_template.igm-update.self_link}" + } +<% end -%> base_instance_name = "igm-update" zone = "us-central1-c" target_size = 2 @@ -597,6 +683,7 @@ func testAccInstanceGroupManager_updateLifecycle(tag, igm string) string { }`, tag, igm) } +<% if version.nil? 
|| version == 'ga' -%> func testAccInstanceGroupManager_updateStrategy(igm string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { @@ -642,6 +729,7 @@ func testAccInstanceGroupManager_updateStrategy(igm string) string { } }`, igm) } +<% end -%> func testAccInstanceGroupManager_rollingUpdatePolicy(igm string) string { return fmt.Sprintf(` @@ -677,12 +765,23 @@ resource "google_compute_instance_template" "igm-rolling-update-policy" { resource "google_compute_instance_group_manager" "igm-rolling-update-policy" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-rolling-update-policy.self_link}" +<% else -%> + version { + name = "prod" + instance_template = "${google_compute_instance_template.igm-rolling-update-policy.self_link}" + } +<% end -%> base_instance_name = "igm-rolling-update-policy" zone = "us-central1-c" target_size = 3 +<% if version.nil? || version == 'ga' -%> update_strategy = "ROLLING_UPDATE" rolling_update_policy { +<% else -%> + update_policy { +<% end -%> type = "PROACTIVE" minimal_action = "REPLACE" max_surge_percent = 50 @@ -726,12 +825,23 @@ resource "google_compute_instance_template" "igm-rolling-update-policy" { resource "google_compute_instance_group_manager" "igm-rolling-update-policy" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-rolling-update-policy.self_link}" +<% else -%> + version { + name = "prod2" + instance_template = "${google_compute_instance_template.igm-rolling-update-policy.self_link}" + } +<% end -%> base_instance_name = "igm-rolling-update-policy" zone = "us-central1-c" target_size = 3 +<% if version.nil? 
|| version == 'ga' -%> update_strategy = "ROLLING_UPDATE" rolling_update_policy { +<% else -%> + update_policy { +<% end -%> type = "PROACTIVE" minimal_action = "REPLACE" max_surge_fixed = 2 @@ -745,6 +855,100 @@ resource "google_compute_instance_group_manager" "igm-rolling-update-policy" { }`, igm) } +<% unless version.nil? || version == 'ga' -%> +func testAccInstanceGroupManager_rollingUpdatePolicy3(igm string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-9" + project = "debian-cloud" +} +resource "google_compute_instance_template" "igm-rolling-update-policy" { + machine_type = "n1-standard-1" + can_ip_forward = false + tags = ["terraform-testing"] + disk { + source_image = "${data.google_compute_image.my_image.self_link}" + auto_delete = true + boot = true + } + network_interface { + network = "default" + } + lifecycle { + create_before_destroy = true + } +} +resource "google_compute_instance_group_manager" "igm-rolling-update-policy" { + description = "Terraform test instance group manager" + name = "%s" + version { + name = "prod2" + instance_template = "${google_compute_instance_template.igm-rolling-update-policy.self_link}" + } + base_instance_name = "igm-rolling-update-policy" + zone = "us-central1-c" + target_size = 3 + update_policy { + type = "PROACTIVE" + minimal_action = "REPLACE" + max_surge_fixed = 0 + max_unavailable_fixed = 2 + min_ready_sec = 20 + } + named_port { + name = "customhttp" + port = 8080 + } +}`, igm) +} + +func testAccInstanceGroupManager_rollingUpdatePolicy4(igm string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-9" + project = "debian-cloud" +} +resource "google_compute_instance_template" "igm-rolling-update-policy" { + machine_type = "n1-standard-1" + can_ip_forward = false + tags = ["terraform-testing"] + disk { + source_image = "${data.google_compute_image.my_image.self_link}" + auto_delete = true + boot = true + } + network_interface 
{ + network = "default" + } + lifecycle { + create_before_destroy = true + } +} +resource "google_compute_instance_group_manager" "igm-rolling-update-policy" { + description = "Terraform test instance group manager" + name = "%s" + version { + name = "prod2" + instance_template = "${google_compute_instance_template.igm-rolling-update-policy.self_link}" + } + base_instance_name = "igm-rolling-update-policy" + zone = "us-central1-c" + target_size = 3 + update_policy { + type = "PROACTIVE" + minimal_action = "REPLACE" + max_surge_fixed = 2 + max_unavailable_fixed = 0 + min_ready_sec = 20 + } + named_port { + name = "customhttp" + port = 8080 + } +}`, igm) +} +<% end -%> + func testAccInstanceGroupManager_separateRegions(igm1, igm2 string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { @@ -779,7 +983,14 @@ func testAccInstanceGroupManager_separateRegions(igm1, igm2 string) string { resource "google_compute_instance_group_manager" "igm-basic" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + name = "prod" + } +<% end -%> base_instance_name = "igm-basic" zone = "us-central1-c" target_size = 2 @@ -788,7 +999,14 @@ func testAccInstanceGroupManager_separateRegions(igm1, igm2 string) string { resource "google_compute_instance_group_manager" "igm-basic-2" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + name = "prod" + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + } +<% end -%> base_instance_name = "igm-basic-2" zone = "us-west1-b" target_size = 2 @@ -833,7 +1051,14 @@ resource "google_compute_target_pool" "igm-basic" { resource "google_compute_instance_group_manager" "igm-basic" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + name = "prod" + } +<% end -%> target_pools = ["${google_compute_target_pool.igm-basic.self_link}"] base_instance_name = "igm-basic" zone = "us-central1-c" @@ -853,6 +1078,60 @@ resource "google_compute_http_health_check" "zero" { `, template, target, igm, hck) } +<% unless version.nil? 
|| version == 'ga' -%> +func testAccInstanceGroupManager_autoHealingPoliciesRemoved(template, target, igm, hck string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-9" + project = "debian-cloud" +} +resource "google_compute_instance_template" "igm-basic" { + name = "%s" + machine_type = "n1-standard-1" + can_ip_forward = false + tags = ["foo", "bar"] + disk { + source_image = "${data.google_compute_image.my_image.self_link}" + auto_delete = true + boot = true + } + network_interface { + network = "default" + } + metadata { + foo = "bar" + } + service_account { + scopes = ["userinfo-email", "compute-ro", "storage-ro"] + } +} +resource "google_compute_target_pool" "igm-basic" { + description = "Resource created for Terraform acceptance testing" + name = "%s" + session_affinity = "CLIENT_IP_PROTO" +} +resource "google_compute_instance_group_manager" "igm-basic" { + description = "Terraform test instance group manager" + name = "%s" + version { + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + name = "prod" + } + target_pools = ["${google_compute_target_pool.igm-basic.self_link}"] + base_instance_name = "igm-basic" + zone = "us-central1-c" + target_size = 2 +} +resource "google_compute_http_health_check" "zero" { + name = "%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + `, template, target, igm, hck) +} +<% end -%> + func testAccInstanceGroupManager_versions(primaryTemplate string, canaryTemplate string, igm string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { @@ -966,7 +1245,14 @@ resource "google_compute_target_pool" "igm-basic" { resource "google_compute_instance_group_manager" "igm-basic" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + name = "primary" + } +<% end -%> target_pools = ["${google_compute_target_pool.igm-basic.self_link}"] base_instance_name = "igm-basic" zone = "us-central1-c" diff --git a/provider/terraform/tests/resource_compute_region_autoscaler_test.go b/provider/terraform/tests/resource_compute_region_autoscaler_test.go.erb similarity index 95% rename from provider/terraform/tests/resource_compute_region_autoscaler_test.go rename to provider/terraform/tests/resource_compute_region_autoscaler_test.go.erb index 2336fddc27d1..4c6e4d7efd2a 100644 --- a/provider/terraform/tests/resource_compute_region_autoscaler_test.go +++ b/provider/terraform/tests/resource_compute_region_autoscaler_test.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -191,7 +192,14 @@ resource "google_compute_target_pool" "foobar" { resource "google_compute_region_instance_group_manager" "foobar" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.foobar.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.foobar.self_link}" + name = "primary" + } +<% end -%> target_pools = ["${google_compute_target_pool.foobar.self_link}"] base_instance_name = "foobar" region = "us-central1" @@ -256,7 +264,14 @@ resource "google_compute_target_pool" "foobar" { resource "google_compute_region_instance_group_manager" "foobar" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.foobar.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.foobar.self_link}" + name = "primary" + } +<% end -%> target_pools = ["${google_compute_target_pool.foobar.self_link}"] base_instance_name = "foobar" region = "us-central1" diff --git a/provider/terraform/tests/resource_compute_region_backend_service_test.go b/provider/terraform/tests/resource_compute_region_backend_service_test.go.erb similarity index 98% rename from provider/terraform/tests/resource_compute_region_backend_service_test.go rename to provider/terraform/tests/resource_compute_region_backend_service_test.go.erb index bde3198c4995..2d8fd722f102 100644 --- a/provider/terraform/tests/resource_compute_region_backend_service_test.go +++ b/provider/terraform/tests/resource_compute_region_backend_service_test.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -328,7 +329,14 @@ resource "google_compute_region_backend_service" "lipsum" { resource "google_compute_instance_group_manager" "foobar" { name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.foobar.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.foobar.self_link}" + name = "primary" + } +<% end -%> base_instance_name = "foobar" zone = "us-central1-f" target_size = 1 diff --git a/provider/terraform/tests/resource_compute_region_instance_group_manager_test.go b/provider/terraform/tests/resource_compute_region_instance_group_manager_test.go.erb similarity index 84% rename from provider/terraform/tests/resource_compute_region_instance_group_manager_test.go rename to provider/terraform/tests/resource_compute_region_instance_group_manager_test.go.erb index 83a1eb93d6e4..7ca9e0728f09 100644 --- a/provider/terraform/tests/resource_compute_region_instance_group_manager_test.go +++ b/provider/terraform/tests/resource_compute_region_instance_group_manager_test.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -129,6 +130,7 @@ func TestAccRegionInstanceGroupManager_updateLifecycle(t *testing.T) { }) } +<% if version.nil? || version == 'ga' -%> func TestAccRegionInstanceGroupManager_updateStrategy(t *testing.T) { t.Parallel() @@ -150,6 +152,7 @@ func TestAccRegionInstanceGroupManager_updateStrategy(t *testing.T) { }, }) } +<% end -%> func TestAccRegionInstanceGroupManager_rollingUpdatePolicy(t *testing.T) { t.Parallel() @@ -161,14 +164,29 @@ func TestAccRegionInstanceGroupManager_rollingUpdatePolicy(t *testing.T) { Providers: testAccProviders, CheckDestroy: testAccCheckInstanceGroupManagerDestroy, Steps: []resource.TestStep{ - resource.TestStep{ + { Config: testAccRegionInstanceGroupManager_rollingUpdatePolicy(igm), }, +<% if version.nil? || version == 'ga' -%> // No import step because rolling updates are broken and the field will be removed in 2.0.0. // TODO(danawillow): Remove this test once we've removed the field. 
- resource.TestStep{ +<% else -%> + { + ResourceName: "google_compute_region_instance_group_manager.igm-rolling-update-policy", + ImportState: true, + ImportStateVerify: true, + }, +<% end -%> + { Config: testAccRegionInstanceGroupManager_rollingUpdatePolicy2(igm), }, +<% unless version.nil? || version == 'ga' -%> + { + ResourceName: "google_compute_region_instance_group_manager.igm-rolling-update-policy", + ImportState: true, + ImportStateVerify: true, + }, +<% end -%> }, }) } @@ -245,6 +263,16 @@ func TestAccRegionInstanceGroupManager_autoHealingPolicies(t *testing.T) { ImportState: true, ImportStateVerify: true, }, +<% unless version.nil? || version == 'ga' -%> + { + Config: testAccRegionInstanceGroupManager_autoHealingPoliciesRemoved(template, target, igm, hck), + }, + { + ResourceName: "google_compute_region_instance_group_manager.igm-basic", + ImportState: true, + ImportStateVerify: true, + }, +<% end -%> }, }) } @@ -341,7 +369,14 @@ func testAccRegionInstanceGroupManager_basic(template, target, igm1, igm2 string resource "google_compute_region_instance_group_manager" "igm-basic" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + name = "primary" + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + } +<% end -%> target_pools = ["${google_compute_target_pool.igm-basic.self_link}"] base_instance_name = "igm-basic" region = "us-central1" @@ -351,7 +386,14 @@ func testAccRegionInstanceGroupManager_basic(template, target, igm1, igm2 string resource "google_compute_region_instance_group_manager" "igm-no-tp" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + name = "primary" + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + } +<% end -%> base_instance_name = "igm-no-tp" region = "us-central1" target_size = 2 @@ -394,7 +436,14 @@ func testAccRegionInstanceGroupManager_targetSizeZero(template, igm string) stri resource "google_compute_region_instance_group_manager" "igm-basic" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + name = "primary" + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + } +<% end -%> base_instance_name = "igm-basic" region = "us-central1" } @@ -442,7 +491,14 @@ func testAccRegionInstanceGroupManager_update(template, target, igm string) stri resource "google_compute_region_instance_group_manager" "igm-update" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-update.self_link}" +<% else -%> + version { + name = "primary" + instance_template = "${google_compute_instance_template.igm-update.self_link}" + } +<% end -%> target_pools = ["${google_compute_target_pool.igm-update.self_link}"] base_instance_name = "igm-update" region = "us-central1" @@ -527,7 +583,14 @@ func testAccRegionInstanceGroupManager_update2(template1, target1, target2, temp resource "google_compute_region_instance_group_manager" "igm-update" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-update2.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.igm-update2.self_link}" + name = "primary" + } +<% end -%> target_pools = [ "${google_compute_target_pool.igm-update.self_link}", "${google_compute_target_pool.igm-update2.self_link}", @@ -580,7 +643,14 @@ func testAccRegionInstanceGroupManager_updateLifecycle(tag, igm string) string { resource "google_compute_region_instance_group_manager" "igm-update" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-update.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.igm-update.self_link}" + name = "primary" + } +<% end -%> base_instance_name = "igm-update" region = "us-central1" target_size = 2 @@ -625,7 +695,14 @@ func testAccRegionInstanceGroupManager_separateRegions(igm1, igm2 string) string resource "google_compute_region_instance_group_manager" "igm-basic" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + name = "primary" + } +<% end -%> base_instance_name = "igm-basic" region = "us-central1" target_size = 2 @@ -634,7 +711,14 @@ func testAccRegionInstanceGroupManager_separateRegions(igm1, igm2 string) string resource "google_compute_region_instance_group_manager" "igm-basic-2" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + name = "primary" + } +<% end -%> base_instance_name = "igm-basic-2" region = "us-west1" target_size = 2 @@ -679,7 +763,14 @@ resource "google_compute_target_pool" "igm-basic" { resource "google_compute_region_instance_group_manager" "igm-basic" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + name = "primary" + } +<% end -%> target_pools = ["${google_compute_target_pool.igm-basic.self_link}"] base_instance_name = "igm-basic" region = "us-central1" @@ -698,6 +789,61 @@ resource "google_compute_http_health_check" "zero" { } `, template, target, igm, hck) } + +<% unless version.nil? 
|| version == 'ga' -%> +func testAccRegionInstanceGroupManager_autoHealingPoliciesRemoved(template, target, igm, hck string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-9" + project = "debian-cloud" +} +resource "google_compute_instance_template" "igm-basic" { + name = "%s" + machine_type = "n1-standard-1" + can_ip_forward = false + tags = ["foo", "bar"] + disk { + source_image = "${data.google_compute_image.my_image.self_link}" + auto_delete = true + boot = true + } + network_interface { + network = "default" + } + metadata { + foo = "bar" + } + service_account { + scopes = ["userinfo-email", "compute-ro", "storage-ro"] + } +} +resource "google_compute_target_pool" "igm-basic" { + description = "Resource created for Terraform acceptance testing" + name = "%s" + session_affinity = "CLIENT_IP_PROTO" +} +resource "google_compute_region_instance_group_manager" "igm-basic" { + description = "Terraform test instance group manager" + name = "%s" + version { + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + name = "primary" + } + target_pools = ["${google_compute_target_pool.igm-basic.self_link}"] + base_instance_name = "igm-basic" + region = "us-central1" + target_size = 2 +} +resource "google_compute_http_health_check" "zero" { + name = "%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + `, template, target, igm, hck) +} +<% end -%> + func testAccRegionInstanceGroupManager_versions(primaryTemplate string, canaryTemplate string, igm string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { @@ -798,7 +944,14 @@ resource "google_compute_instance_template" "igm-basic" { resource "google_compute_region_instance_group_manager" "igm-basic" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? 
|| version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-basic.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.igm-basic.self_link}" + name = "primary" + } +<% end -%> base_instance_name = "igm-basic" region = "us-central1" target_size = 2 @@ -807,6 +960,7 @@ resource "google_compute_region_instance_group_manager" "igm-basic" { `, template, igm, strings.Join(zones, "\",\"")) } +<% if version.nil? || version == 'ga' -%> func testAccRegionInstanceGroupManager_updateStrategy(igm string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { @@ -852,6 +1006,7 @@ resource "google_compute_region_instance_group_manager" "igm-update-strategy" { } }`, igm) } +<% end -%> func testAccRegionInstanceGroupManager_rollingUpdatePolicy(igm string) string { return fmt.Sprintf(` @@ -887,14 +1042,26 @@ resource "google_compute_instance_template" "igm-rolling-update-policy" { resource "google_compute_region_instance_group_manager" "igm-rolling-update-policy" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-rolling-update-policy.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.igm-rolling-update-policy.self_link}" + name = "primary" + } +<% end -%> base_instance_name = "igm-rolling-update-policy" region = "us-central1" target_size = 4 distribution_policy_zones = ["us-central1-a", "us-central1-f"] +<% if version.nil? 
|| version == 'ga' -%> update_strategy = "ROLLING_UPDATE" rolling_update_policy { +<% else -%> + + update_policy { +<% end -%> type = "PROACTIVE" minimal_action = "REPLACE" max_surge_fixed = 2 @@ -939,14 +1106,26 @@ resource "google_compute_instance_template" "igm-rolling-update-policy" { resource "google_compute_region_instance_group_manager" "igm-rolling-update-policy" { description = "Terraform test instance group manager" name = "%s" +<% if version.nil? || version == 'ga' -%> instance_template = "${google_compute_instance_template.igm-rolling-update-policy.self_link}" +<% else -%> + version { + name = "primary" + instance_template = "${google_compute_instance_template.igm-rolling-update-policy.self_link}" + } +<% end -%> base_instance_name = "igm-rolling-update-policy" region = "us-central1" distribution_policy_zones = ["us-central1-a", "us-central1-f"] target_size = 3 +<% if version.nil? || version == 'ga' -%> update_strategy = "ROLLING_UPDATE" rolling_update_policy { +<% else -%> + + update_policy { +<% end -%> type = "PROACTIVE" minimal_action = "REPLACE" max_surge_fixed = 2 diff --git a/provider/terraform/tests/resource_container_cluster_migrate_test.go b/provider/terraform/tests/resource_container_cluster_migrate_test.go new file mode 100644 index 000000000000..84fa535b6176 --- /dev/null +++ b/provider/terraform/tests/resource_container_cluster_migrate_test.go @@ -0,0 +1,75 @@ +package google + +import ( + "testing" + + "github.com/hashicorp/terraform/terraform" +) + +func TestContainerClusterMigrateState(t *testing.T) { + cases := map[string]struct { + StateVersion int + Attributes map[string]string + Expected map[string]string + Meta interface{} + }{ + "change additional_zones from list to set": { + StateVersion: 0, + Attributes: map[string]string{ + "additional_zones.#": "2", + "additional_zones.0": "us-central1-c", + "additional_zones.1": "us-central1-b", + }, + Expected: map[string]string{ + "additional_zones.#": "2", + "additional_zones.90274510": 
"us-central1-c", + "additional_zones.1919306328": "us-central1-b", + }, + Meta: &Config{}, + }, + } + + for tn, tc := range cases { + is := &terraform.InstanceState{ + ID: "i-abc123", + Attributes: tc.Attributes, + } + is, err := resourceContainerClusterMigrateState( + tc.StateVersion, is, tc.Meta) + + if err != nil { + t.Fatalf("bad: %s, err: %#v", tn, err) + } + + for k, v := range tc.Expected { + if is.Attributes[k] != v { + t.Fatalf( + "bad: %s\n\n expected: %#v -> %#v\n got: %#v -> %#v\n in: %#v", + tn, k, v, k, is.Attributes[k], is.Attributes) + } + } + } +} + +func TestContainerClusterMigrateState_empty(t *testing.T) { + var is *terraform.InstanceState + var meta *Config + + // should handle nil + is, err := resourceContainerClusterMigrateState(0, is, meta) + + if err != nil { + t.Fatalf("err: %#v", err) + } + if is != nil { + t.Fatalf("expected nil instancestate, got: %#v", is) + } + + // should handle non-nil but empty + is = &terraform.InstanceState{} + is, err = resourceContainerClusterMigrateState(0, is, meta) + + if err != nil { + t.Fatalf("err: %#v", err) + } +} diff --git a/provider/terraform/tests/resource_container_cluster_test.go b/provider/terraform/tests/resource_container_cluster_test.go new file mode 100644 index 000000000000..283dd96751ee --- /dev/null +++ b/provider/terraform/tests/resource_container_cluster_test.go @@ -0,0 +1,2688 @@ +package google + +import ( + "bytes" + "fmt" + "testing" + + "strconv" + + "regexp" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccContainerCluster_basic(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: 
testAccContainerCluster_basic(clusterName), + }, + { + ResourceName: "google_container_cluster.primary", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + ResourceName: "google_container_cluster.primary", + ImportStateIdPrefix: fmt.Sprintf("%s/us-central1-a/", getTestProjectFromEnv()), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withTimeout(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withTimeout(), + }, + { + ResourceName: "google_container_cluster.primary", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withAddons(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withAddons(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.primary", "addons_config.0.http_load_balancing.0.disabled", "true"), + resource.TestCheckResourceAttr("google_container_cluster.primary", "addons_config.0.kubernetes_dashboard.0.disabled", "true"), + ), + }, + { + ResourceName: "google_container_cluster.primary", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_updateAddons(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.primary", "addons_config.0.horizontal_pod_autoscaling.0.disabled", 
"true"), + resource.TestCheckResourceAttr("google_container_cluster.primary", "addons_config.0.http_load_balancing.0.disabled", "false"), + resource.TestCheckResourceAttr("google_container_cluster.primary", "addons_config.0.kubernetes_dashboard.0.disabled", "true"), + ), + }, + { + ResourceName: "google_container_cluster.primary", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withMasterAuthConfig(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withMasterAuth(clusterName), + }, + { + ResourceName: "google_container_cluster.with_master_auth", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_updateMasterAuth(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_master_auth", "master_auth.0.username", "mr.yoda.adoy.mr"), + resource.TestCheckResourceAttr("google_container_cluster.with_master_auth", "master_auth.0.password", "adoy.rm.123456789.mr.yoda"), + ), + }, + { + ResourceName: "google_container_cluster.with_master_auth", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_disableMasterAuth(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_master_auth", "master_auth.0.username", ""), + resource.TestCheckResourceAttr("google_container_cluster.with_master_auth", "master_auth.0.password", ""), + ), + }, + { + ResourceName: "google_container_cluster.with_master_auth", + ImportStateIdPrefix: 
"us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_updateMasterAuth(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_master_auth", "master_auth.0.username", "mr.yoda.adoy.mr"), + resource.TestCheckResourceAttr("google_container_cluster.with_master_auth", "master_auth.0.password", "adoy.rm.123456789.mr.yoda"), + ), + }, + { + ResourceName: "google_container_cluster.with_master_auth", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withMasterAuthConfig_NoCert(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withMasterAuthNoCert(), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_master_auth_no_cert", "master_auth.0.client_certificate", ""), + ), + }, + { + ResourceName: "google_container_cluster.with_master_auth_no_cert", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withNetworkPolicyEnabled(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withNetworkPolicyEnabled(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_network_policy_enabled", + "network_policy.#", "1"), + ), + }, + { + ResourceName: 
"google_container_cluster.with_network_policy_enabled", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"remove_default_node_pool"}, + }, + { + Config: testAccContainerCluster_removeNetworkPolicy(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckNoResourceAttr("google_container_cluster.with_network_policy_enabled", + "network_policy"), + ), + }, + { + ResourceName: "google_container_cluster.with_network_policy_enabled", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"remove_default_node_pool"}, + }, + { + Config: testAccContainerCluster_withNetworkPolicyDisabled(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_network_policy_enabled", + "network_policy.0.enabled", "false"), + ), + }, + { + ResourceName: "google_container_cluster.with_network_policy_enabled", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"remove_default_node_pool"}, + }, + { + Config: testAccContainerCluster_withNetworkPolicyConfigDisabled(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_network_policy_enabled", + "addons_config.0.network_policy_config.0.disabled", "true"), + ), + }, + { + ResourceName: "google_container_cluster.with_network_policy_enabled", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"remove_default_node_pool"}, + }, + { + Config: testAccContainerCluster_withNetworkPolicyConfigDisabled(clusterName), + PlanOnly: true, + ExpectNonEmptyPlan: false, + }, + }, + }) +} + +func TestAccContainerCluster_withMasterAuthorizedNetworksConfig(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", 
acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withMasterAuthorizedNetworksConfig(clusterName, []string{}, "cidr_blocks = []"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_master_authorized_networks", + "master_authorized_networks_config.#", "1"), + resource.TestCheckResourceAttr("google_container_cluster.with_master_authorized_networks", + "master_authorized_networks_config.0.cidr_blocks.#", "0"), + ), + }, + { + Config: testAccContainerCluster_withMasterAuthorizedNetworksConfig(clusterName, []string{"8.8.8.8/32"}, ""), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_master_authorized_networks", + "master_authorized_networks_config.0.cidr_blocks.#", "1"), + ), + }, + { + ResourceName: "google_container_cluster.with_master_authorized_networks", + ImportState: true, + ImportStateVerify: true, + ImportStateIdPrefix: "us-central1-a/", + }, + { + Config: testAccContainerCluster_withMasterAuthorizedNetworksConfig(clusterName, []string{"10.0.0.0/8", "8.8.8.8/32"}, ""), + }, + { + ResourceName: "google_container_cluster.with_master_authorized_networks", + ImportState: true, + ImportStateVerify: true, + ImportStateIdPrefix: "us-central1-a/", + }, + { + Config: testAccContainerCluster_withMasterAuthorizedNetworksConfig(clusterName, []string{}, ""), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckNoResourceAttr("google_container_cluster.with_master_authorized_networks", + "master_authorized_networks_config.0.cidr_blocks"), + ), + }, + { + ResourceName: "google_container_cluster.with_master_authorized_networks", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func 
TestAccContainerCluster_regional(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-regional-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_regional(clusterName), + }, + { + ResourceName: "google_container_cluster.regional", + ImportStateIdPrefix: "us-central1/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_regionalWithNodePool(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-regional-%s", acctest.RandString(10)) + npName := fmt.Sprintf("tf-cluster-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_regionalWithNodePool(clusterName, npName), + }, + { + ResourceName: "google_container_cluster.regional", + ImportStateIdPrefix: "us-central1/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withAdditionalZones(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withAdditionalZones(clusterName), + }, + { + ResourceName: "google_container_cluster.with_additional_zones", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_updateAdditionalZones(clusterName), + }, + { + ResourceName: 
"google_container_cluster.with_additional_zones", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_regionalWithAdditionalZones(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_regionalAdditionalZones(clusterName), + }, + { + ResourceName: "google_container_cluster.with_additional_zones", + ImportStateIdPrefix: "us-central1/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_regionalUpdateAdditionalZones(clusterName), + }, + { + ResourceName: "google_container_cluster.with_additional_zones", + ImportStateIdPrefix: "us-central1/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withKubernetesAlpha(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withKubernetesAlpha(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_kubernetes_alpha", "enable_kubernetes_alpha", "true"), + ), + }, + { + ResourceName: "google_container_cluster.with_kubernetes_alpha", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withTpu(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() 
{ testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withTpu(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_tpu", "enable_tpu", "true"), + ), + }, + { + ResourceName: "google_container_cluster.with_tpu", + ImportStateIdPrefix: "us-central1-b/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withPrivateCluster(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withPrivateCluster(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_private_cluster", "private_cluster", "true"), + ), + }, + { + ResourceName: "google_container_cluster.with_private_cluster", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withPrivateClusterConfig(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withPrivateClusterConfig(clusterName), + }, + { + ResourceName: "google_container_cluster.with_private_cluster", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withLegacyAbac(t *testing.T) { + t.Parallel() + + clusterName := 
fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withLegacyAbac(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_legacy_abac", "enable_legacy_abac", "true"), + ), + }, + { + ResourceName: "google_container_cluster.with_legacy_abac", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_updateLegacyAbac(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_legacy_abac", "enable_legacy_abac", "false"), + ), + }, + { + ResourceName: "google_container_cluster.with_legacy_abac", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +/* + Since GKE disables legacy ABAC by default in Kubernetes version 1.8+, and the default Kubernetes + version for GKE is also 1.8+, this test will ensure that legacy ABAC is disabled by default to be + more consistent with default settings in the Cloud Console +*/ +func TestAccContainerCluster_withDefaultLegacyAbac(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_defaultLegacyAbac(acctest.RandString(10)), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.default_legacy_abac", "enable_legacy_abac", "false"), + ), + }, + { + ResourceName: "google_container_cluster.default_legacy_abac", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, 
+ }) +} + +func TestAccContainerCluster_withVersion(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withVersion(clusterName), + }, + { + ResourceName: "google_container_cluster.with_version", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"min_master_version"}, + }, + }, + }) +} + +func TestAccContainerCluster_updateVersion(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withLowerVersion(clusterName), + }, + { + ResourceName: "google_container_cluster.with_version", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"min_master_version"}, + }, + { + Config: testAccContainerCluster_updateVersion(clusterName), + }, + { + ResourceName: "google_container_cluster.with_version", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"min_master_version"}, + }, + }, + }) +} + +func TestAccContainerCluster_withNodeConfig(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withNodeConfig(clusterName), 
+ }, + { + ResourceName: "google_container_cluster.with_node_config", + ImportStateIdPrefix: "us-central1-f/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_withNodeConfigUpdate(clusterName), + }, + { + ResourceName: "google_container_cluster.with_node_config", + ImportStateIdPrefix: "us-central1-f/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withNodeConfigScopeAlias(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withNodeConfigScopeAlias(), + }, + { + ResourceName: "google_container_cluster.with_node_config_scope_alias", + ImportStateIdPrefix: "us-central1-f/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withNodeConfigTaints(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withNodeConfigTaints(), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_node_config", "node_config.0.taint.#", "2"), + ), + }, + // Don't include an import step because beta features can't yet be imported. + // Once taints are in GA, consider merging this test with the _withNodeConfig test. 
+ }, + }) +} + +func TestAccContainerCluster_withWorkloadMetadataConfig(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withWorkloadMetadataConfig(), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_workload_metadata_config", + "node_config.0.workload_metadata_config.0.node_metadata", "SECURE"), + ), + }, + { + ResourceName: "google_container_cluster.with_workload_metadata_config", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"min_master_version"}, + }, + }, + }) +} + +func TestAccContainerCluster_network(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_networkRef(), + }, + { + ResourceName: "google_container_cluster.with_net_ref_by_url", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + ResourceName: "google_container_cluster.with_net_ref_by_name", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_backend(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_backendRef(), + }, + { + ResourceName: "google_container_cluster.primary", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func 
TestAccContainerCluster_withLogging(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withLogging(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_logging", "logging_service", "logging.googleapis.com"), + ), + }, + { + ResourceName: "google_container_cluster.with_logging", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_updateLogging(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_logging", "logging_service", "none"), + ), + }, + { + ResourceName: "google_container_cluster.with_logging", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withMonitoring(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withMonitoring(clusterName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_monitoring", "monitoring_service", "monitoring.googleapis.com"), + ), + }, + { + ResourceName: "google_container_cluster.with_monitoring", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_updateMonitoring(clusterName), + Check: resource.ComposeTestCheckFunc( + 
resource.TestCheckResourceAttr("google_container_cluster.with_monitoring", "monitoring_service", "none"), + ), + }, + { + ResourceName: "google_container_cluster.with_monitoring", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withNodePoolBasic(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-cluster-nodepool-test-%s", acctest.RandString(10)) + npName := fmt.Sprintf("tf-cluster-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withNodePoolBasic(clusterName, npName), + }, + { + ResourceName: "google_container_cluster.with_node_pool", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withNodePoolResize(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-cluster-nodepool-test-%s", acctest.RandString(10)) + npName := fmt.Sprintf("tf-cluster-nodepool-test-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withNodePoolAdditionalZones(clusterName, npName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_node_pool", "node_pool.0.node_count", "2"), + ), + }, + { + ResourceName: "google_container_cluster.with_node_pool", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_withNodePoolResize(clusterName, npName), + Check: resource.ComposeTestCheckFunc( + 
resource.TestCheckResourceAttr("google_container_cluster.with_node_pool", "node_pool.0.node_count", "3"), + ), + }, + { + ResourceName: "google_container_cluster.with_node_pool", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withNodePoolAutoscaling(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-cluster-nodepool-test-%s", acctest.RandString(10)) + npName := fmt.Sprintf("tf-cluster-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccContainerCluster_withNodePoolAutoscaling(clusterName, npName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_node_pool", "node_pool.0.autoscaling.0.min_node_count", "1"), + resource.TestCheckResourceAttr("google_container_cluster.with_node_pool", "node_pool.0.autoscaling.0.max_node_count", "3"), + ), + }, + { + ResourceName: "google_container_cluster.with_node_pool", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + Config: testAccContainerCluster_withNodePoolUpdateAutoscaling(clusterName, npName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_node_pool", "node_pool.0.autoscaling.0.min_node_count", "1"), + resource.TestCheckResourceAttr("google_container_cluster.with_node_pool", "node_pool.0.autoscaling.0.max_node_count", "5"), + ), + }, + { + ResourceName: "google_container_cluster.with_node_pool", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + Config: testAccContainerCluster_withNodePoolBasic(clusterName, npName), + Check: resource.ComposeTestCheckFunc( + 
resource.TestCheckNoResourceAttr("google_container_cluster.with_node_pool", "node_pool.0.autoscaling.0.min_node_count"), + resource.TestCheckNoResourceAttr("google_container_cluster.with_node_pool", "node_pool.0.autoscaling.0.max_node_count"), + ), + }, + { + ResourceName: "google_container_cluster.with_node_pool", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withNodePoolNamePrefix(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withNodePoolNamePrefix(), + }, + { + ResourceName: "google_container_cluster.with_node_pool_name_prefix", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"node_pool.0.name_prefix"}, + }, + }, + }) +} + +func TestAccContainerCluster_withNodePoolMultiple(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withNodePoolMultiple(), + }, + { + ResourceName: "google_container_cluster.with_node_pool_multiple", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withNodePoolConflictingNameFields(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withNodePoolConflictingNameFields(), + ExpectError: regexp.MustCompile("Cannot specify both name and name_prefix for a 
node_pool"), + }, + }, + }) +} + +func TestAccContainerCluster_withNodePoolNodeConfig(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withNodePoolNodeConfig(), + }, + { + ResourceName: "google_container_cluster.with_node_pool_node_config", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withDefaultNodePoolRemoved(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withDefaultNodePoolRemoved(), + Check: resource.TestCheckResourceAttr( + "google_container_cluster.with_default_node_pool_removed", "node_pool.#", "0"), + }, + { + ResourceName: "google_container_cluster.with_default_node_pool_removed", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"remove_default_node_pool"}, + }, + }, + }) +} + +func TestAccContainerCluster_withMaintenanceWindow(t *testing.T) { + t.Parallel() + clusterName := acctest.RandString(10) + resourceName := "google_container_cluster.with_maintenance_window" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withMaintenanceWindow(clusterName, "03:00"), + }, + { + ResourceName: resourceName, + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_withMaintenanceWindow(clusterName, ""), + Check: 
resource.ComposeTestCheckFunc( + resource.TestCheckNoResourceAttr(resourceName, + "maintenance_policy.0.daily_maintenance_window.0.start_time"), + ), + }, + { + ResourceName: resourceName, + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + // maintenance_policy.# = 0 is equivalent to no maintenance policy at all, + // but will still cause an import diff + ImportStateVerifyIgnore: []string{"maintenance_policy.#"}, + }, + }, + }) +} + +func TestAccContainerCluster_withIPAllocationPolicy_existingSecondaryRanges(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withIPAllocationPolicy_existingSecondaryRanges(cluster), + }, + { + ResourceName: "google_container_cluster.with_ip_allocation_policy", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withIPAllocationPolicy_specificIPRanges(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withIPAllocationPolicy_specificIPRanges(cluster), + }, + { + ResourceName: "google_container_cluster.with_ip_allocation_policy", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withIPAllocationPolicy_specificSizes(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() 
{ testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withIPAllocationPolicy_specificSizes(cluster), + }, + { + ResourceName: "google_container_cluster.with_ip_allocation_policy", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withIPAllocationPolicy_createSubnetwork(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withIPAllocationPolicy_createSubnetwork(cluster), + }, + { + ResourceName: "google_container_cluster.with_ip_allocation_policy", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withPodSecurityPolicy(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withPodSecurityPolicy(clusterName, true), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_cluster.with_pod_security_policy", + "pod_security_policy_config.0.enabled", "true"), + ), + }, + { + ResourceName: "google_container_cluster.with_pod_security_policy", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_withPodSecurityPolicy(clusterName, false), + Check: resource.ComposeTestCheckFunc( + 
resource.TestCheckResourceAttr("google_container_cluster.with_pod_security_policy", + "pod_security_policy_config.0.enabled", "false"), + ), + }, + { + ResourceName: "google_container_cluster.with_pod_security_policy", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_sharedVpc(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + org := getTestOrgFromEnv(t) + billingId := getTestBillingAccountFromEnv(t) + projectName := fmt.Sprintf("tf-xpntest-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_sharedVpc(org, billingId, projectName, clusterName), + }, + { + ResourceName: "google_container_cluster.shared_vpc_cluster", + ImportStateIdPrefix: fmt.Sprintf("%s-service/us-central1-a/", projectName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withResourceLabels(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withResourceLabels(clusterName), + }, + { + ResourceName: "google_container_cluster.with_resource_labels", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withResourceLabelsUpdate(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: 
testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withoutResourceLabels(clusterName), + }, + { + Config: testAccContainerCluster_withResourceLabels(clusterName), + }, + { + ResourceName: "google_container_cluster.with_resource_labels", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerCluster_withBinaryAuthorization(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("cluster-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withBinaryAuthorization(clusterName, true), + }, + { + ResourceName: "google_container_cluster.with_binary_authorization", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerCluster_withBinaryAuthorization(clusterName, false), + }, + { + ResourceName: "google_container_cluster.with_binary_authorization", + ImportStateIdPrefix: "us-central1-a/", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckContainerClusterDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_container_cluster" { + continue + } + + attributes := rs.Primary.Attributes + _, err := config.clientContainer.Projects.Zones.Clusters.Get( + config.Project, attributes["zone"], attributes["name"]).Do() + if err == nil { + return fmt.Errorf("Cluster still exists") + } + } + + return nil +} + +func getResourceAttributes(n string, s *terraform.State) (map[string]string, error) { + rs, ok := s.RootModule().Resources[n] + if !ok { + return nil, fmt.Errorf("Not found: %s", n) + } + + if 
rs.Primary.ID == "" { + return nil, fmt.Errorf("No ID is set") + } + + return rs.Primary.Attributes, nil +} + +func checkMatch(attributes map[string]string, attr string, gcp interface{}) string { + if gcpList, ok := gcp.([]string); ok { + return checkListMatch(attributes, attr, gcpList) + } + if gcpMap, ok := gcp.(map[string]string); ok { + return checkMapMatch(attributes, attr, gcpMap) + } + if gcpBool, ok := gcp.(bool); ok { + return checkBoolMatch(attributes, attr, gcpBool) + } + + tf := attributes[attr] + if tf != gcp { + return matchError(attr, tf, gcp) + } + return "" +} + +func checkListMatch(attributes map[string]string, attr string, gcpList []string) string { + num, err := strconv.Atoi(attributes[attr+".#"]) + if err != nil { + return fmt.Sprintf("Error in number conversion for attribute %s: %s", attr, err) + } + if num != len(gcpList) { + return fmt.Sprintf("Cluster has mismatched %s size.\nTF Size: %d\nGCP Size: %d", attr, num, len(gcpList)) + } + + for i, gcp := range gcpList { + if tf := attributes[fmt.Sprintf("%s.%d", attr, i)]; tf != gcp { + return matchError(fmt.Sprintf("%s[%d]", attr, i), tf, gcp) + } + } + + return "" +} + +func checkMapMatch(attributes map[string]string, attr string, gcpMap map[string]string) string { + num, err := strconv.Atoi(attributes[attr+".%"]) + if err != nil { + return fmt.Sprintf("Error in number conversion for attribute %s: %s", attr, err) + } + if num != len(gcpMap) { + return fmt.Sprintf("Cluster has mismatched %s size.\nTF Size: %d\nGCP Size: %d", attr, num, len(gcpMap)) + } + + for k, gcp := range gcpMap { + if tf := attributes[fmt.Sprintf("%s.%s", attr, k)]; tf != gcp { + return matchError(fmt.Sprintf("%s[%s]", attr, k), tf, gcp) + } + } + + return "" +} + +func checkBoolMatch(attributes map[string]string, attr string, gcpBool bool) string { + // Handle the case where an unset value defaults to false + var tf bool + var err error + if attributes[attr] == "" { + tf = false + } else { + tf, err = 
strconv.ParseBool(attributes[attr]) + if err != nil { + return fmt.Sprintf("Error converting attribute %s to boolean: value is %s", attr, attributes[attr]) + } + } + + if tf != gcpBool { + return matchError(attr, tf, gcpBool) + } + + return "" +} + +func matchError(attr, tf interface{}, gcp interface{}) string { + return fmt.Sprintf("Cluster has mismatched %s.\nTF State: %+v\nGCP State: %+v", attr, tf, gcp) +} + +func testAccContainerCluster_basic(name string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "primary" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 3 +}`, name) +} + +func testAccContainerCluster_withTimeout() string { + return fmt.Sprintf(` +resource "google_container_cluster" "primary" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 3 + + timeouts { + create = "30m" + delete = "30m" + update = "30m" + } +}`, acctest.RandString(10)) +} + +func testAccContainerCluster_withAddons(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "primary" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 3 + + addons_config { + http_load_balancing { disabled = true } + kubernetes_dashboard { disabled = true } + } +}`, clusterName) +} + +func testAccContainerCluster_updateAddons(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "primary" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 3 + + addons_config { + http_load_balancing { disabled = false } + kubernetes_dashboard { disabled = true } + horizontal_pod_autoscaling { disabled = true } + } +}`, clusterName) +} + +func testAccContainerCluster_withMasterAuth(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_master_auth" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 3 + + master_auth { + username = "mr.yoda" + password = "adoy.rm.123456789" + } +}`, clusterName) +} + +func 
testAccContainerCluster_updateMasterAuth(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_master_auth" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 3 + + master_auth { + username = "mr.yoda.adoy.mr" + password = "adoy.rm.123456789.mr.yoda" + } +}`, clusterName) +} + +func testAccContainerCluster_disableMasterAuth(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_master_auth" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 3 + + master_auth { + username = "" + password = "" + } +}`, clusterName) +} + +func testAccContainerCluster_updateMasterAuthNoCert() string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_master_auth" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 3 + + master_auth { + username = "mr.yoda" + password = "adoy.rm.123456789" + client_certificate_config { + issue_client_certificate = false + } + } +}`, acctest.RandString(10)) +} + +func testAccContainerCluster_withMasterAuthNoCert() string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_master_auth_no_cert" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 3 + + master_auth { + username = "mr.yoda" + password = "adoy.rm.123456789" + client_certificate_config { + issue_client_certificate = false + } + } +}`, acctest.RandString(10)) +} + +func testAccContainerCluster_withNetworkPolicyEnabled(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_network_policy_enabled" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + remove_default_node_pool = true + + network_policy { + enabled = true + provider = "CALICO" + } + addons_config { + network_policy_config { + disabled = false + } + } +}`, clusterName) +} + +func testAccContainerCluster_removeNetworkPolicy(clusterName string) string { + return fmt.Sprintf(` +resource 
"google_container_cluster" "with_network_policy_enabled" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + remove_default_node_pool = true +}`, clusterName) +} + +func testAccContainerCluster_withNetworkPolicyDisabled(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_network_policy_enabled" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + remove_default_node_pool = true + + network_policy = {} +}`, clusterName) +} + +func testAccContainerCluster_withNetworkPolicyConfigDisabled(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_network_policy_enabled" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + remove_default_node_pool = true + + network_policy = {} + addons_config { + network_policy_config { + disabled = true + } + } +}`, clusterName) +} + +func testAccContainerCluster_withMasterAuthorizedNetworksConfig(clusterName string, cidrs []string, emptyValue string) string { + + cidrBlocks := emptyValue + if len(cidrs) > 0 { + var buf bytes.Buffer + buf.WriteString("cidr_blocks = [") + for _, c := range cidrs { + buf.WriteString(fmt.Sprintf(` + { + cidr_block = "%s" + display_name = "disp-%s" + },`, c, c)) + } + buf.WriteString("]") + cidrBlocks = buf.String() + } + + return fmt.Sprintf(` +resource "google_container_cluster" "with_master_authorized_networks" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + + master_authorized_networks_config { + %s + } +}`, clusterName, cidrBlocks) +} + +func testAccContainerCluster_regional(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "regional" { + name = "%s" + region = "us-central1" + initial_node_count = 1 +}`, clusterName) +} + +func testAccContainerCluster_regionalWithNodePool(cluster, nodePool string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "regional" { + name = "%s" + region = "us-central1" + + 
node_pool { + name = "%s" + } +}`, cluster, nodePool) +} + +func testAccContainerCluster_withAdditionalZones(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_additional_zones" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + + additional_zones = [ + "us-central1-b", + "us-central1-c" + ] +}`, clusterName) +} + +func testAccContainerCluster_updateAdditionalZones(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_additional_zones" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + + additional_zones = [ + "us-central1-f", + "us-central1-c", + ] +}`, clusterName) +} + +func testAccContainerCluster_regionalAdditionalZones(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_additional_zones" { + name = "%s" + region = "us-central1" + initial_node_count = 1 + + additional_zones = [ + "us-central1-f", + "us-central1-c", + ] +}`, clusterName) +} + +func testAccContainerCluster_regionalUpdateAdditionalZones(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_additional_zones" { + name = "%s" + region = "us-central1" + initial_node_count = 1 + + additional_zones = [ + "us-central1-f", + "us-central1-b", + ] +}`, clusterName) +} + +func testAccContainerCluster_withKubernetesAlpha(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_kubernetes_alpha" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + enable_kubernetes_alpha = true +}`, clusterName) +} + +func testAccContainerCluster_withTpu(clusterName string) string { + return fmt.Sprintf(` +resource "google_compute_network" "container_network" { + name = "container-net-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "container_subnetwork" { + name = "${google_compute_network.container_network.name}" + network = 
"${google_compute_network.container_network.name}" + ip_cidr_range = "10.0.35.0/24" + region = "us-central1" + + secondary_ip_range { + range_name = "pod" + ip_cidr_range = "10.1.0.0/19" + } + + secondary_ip_range { + range_name = "svc" + ip_cidr_range = "10.2.0.0/22" + } +} + +resource "google_container_cluster" "with_tpu" { + name = "cluster-test-%s" + zone = "us-central1-b" + initial_node_count = 1 + + enable_tpu = true + + network = "${google_compute_network.container_network.name}" + subnetwork = "${google_compute_subnetwork.container_subnetwork.name}" + + master_ipv4_cidr_block = "10.42.0.0/28" + master_authorized_networks_config { cidr_blocks = [] } + ip_allocation_policy { + cluster_secondary_range_name = "${google_compute_subnetwork.container_subnetwork.secondary_ip_range.0.range_name}" + services_secondary_range_name = "${google_compute_subnetwork.container_subnetwork.secondary_ip_range.1.range_name}" + } +}`, clusterName, clusterName) +} + +func testAccContainerCluster_defaultLegacyAbac(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "default_legacy_abac" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 +}`, clusterName) +} + +func testAccContainerCluster_withLegacyAbac(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_legacy_abac" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + enable_legacy_abac = true +}`, clusterName) +} + +func testAccContainerCluster_updateLegacyAbac(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_legacy_abac" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + enable_legacy_abac = false +}`, clusterName) +} + +func testAccContainerCluster_withVersion(clusterName string) string { + return fmt.Sprintf(` +data "google_container_engine_versions" "central1a" { + zone = "us-central1-a" +} + +resource 
"google_container_cluster" "with_version" { + name = "cluster-test-%s" + zone = "us-central1-a" + min_master_version = "${data.google_container_engine_versions.central1a.latest_master_version}" + initial_node_count = 1 +}`, clusterName) +} + +func testAccContainerCluster_withLowerVersion(clusterName string) string { + return fmt.Sprintf(` +data "google_container_engine_versions" "central1a" { + zone = "us-central1-a" +} + +resource "google_container_cluster" "with_version" { + name = "cluster-test-%s" + zone = "us-central1-a" + min_master_version = "${data.google_container_engine_versions.central1a.valid_master_versions.2}" + initial_node_count = 1 +}`, clusterName) +} + +func testAccContainerCluster_updateVersion(clusterName string) string { + return fmt.Sprintf(` +data "google_container_engine_versions" "central1a" { + zone = "us-central1-a" +} + +resource "google_container_cluster" "with_version" { + name = "cluster-test-%s" + zone = "us-central1-a" + min_master_version = "${data.google_container_engine_versions.central1a.valid_master_versions.1}" + node_version = "${data.google_container_engine_versions.central1a.valid_node_versions.1}" + initial_node_count = 1 +}`, clusterName) +} + +func testAccContainerCluster_withNodeConfig(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_node_config" { + name = "%s" + zone = "us-central1-f" + initial_node_count = 1 + + node_config { + machine_type = "n1-standard-1" + disk_size_gb = 15 + disk_type = "pd-ssd" + local_ssd_count = 1 + oauth_scopes = [ + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/devstorage.read_only", + "https://www.googleapis.com/auth/logging.write" + ] + service_account = "default" + metadata { + foo = "bar" + } + labels { + foo = "bar" + } + tags = ["foo", "bar"] + preemptible = true + min_cpu_platform = "Intel Broadwell" + + // Updatable fields + image_type = "COS" + } +}`, 
clusterName) +} + +func testAccContainerCluster_withNodeConfigUpdate(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_node_config" { + name = "%s" + zone = "us-central1-f" + initial_node_count = 1 + + node_config { + machine_type = "n1-standard-1" + disk_size_gb = 15 + disk_type = "pd-ssd" + local_ssd_count = 1 + oauth_scopes = [ + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/devstorage.read_only", + "https://www.googleapis.com/auth/logging.write" + ] + service_account = "default" + metadata { + foo = "bar" + } + labels { + foo = "bar" + } + tags = ["foo", "bar"] + preemptible = true + min_cpu_platform = "Intel Broadwell" + + // Updatable fields + image_type = "UBUNTU" + } +}`, clusterName) +} + +func testAccContainerCluster_withNodeConfigScopeAlias() string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_node_config_scope_alias" { + name = "cluster-test-%s" + zone = "us-central1-f" + initial_node_count = 1 + + node_config { + machine_type = "g1-small" + disk_size_gb = 15 + oauth_scopes = [ "compute-rw", "storage-ro", "logging-write", "monitoring" ] + } +}`, acctest.RandString(10)) +} + +func testAccContainerCluster_withNodeConfigTaints() string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_node_config" { + name = "cluster-test-%s" + zone = "us-central1-f" + initial_node_count = 1 + + node_config { + taint { + key = "taint_key" + value = "taint_value" + effect = "PREFER_NO_SCHEDULE" + } + taint { + key = "taint_key2" + value = "taint_value2" + effect = "NO_EXECUTE" + } + } +}`, acctest.RandString(10)) +} + +func testAccContainerCluster_withWorkloadMetadataConfig() string { + return fmt.Sprintf(` +data "google_container_engine_versions" "central1a" { + zone = "us-central1-a" +} + +resource "google_container_cluster" "with_workload_metadata_config" { + name = "cluster-test-%s" + zone = "us-central1-a" 
+ initial_node_count = 1 + min_master_version = "${data.google_container_engine_versions.central1a.latest_master_version}" + node_version = "${data.google_container_engine_versions.central1a.latest_node_version}" + + node_config { + oauth_scopes = [ + "https://www.googleapis.com/auth/logging.write", + "https://www.googleapis.com/auth/monitoring" + ] + + workload_metadata_config { + node_metadata = "SECURE" + } + } +} +`, acctest.RandString(10)) +} + +func testAccContainerCluster_networkRef() string { + return fmt.Sprintf(` +resource "google_compute_network" "container_network" { + name = "container-net-%s" + auto_create_subnetworks = true +} + +resource "google_container_cluster" "with_net_ref_by_url" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + network = "${google_compute_network.container_network.self_link}" +} + +resource "google_container_cluster" "with_net_ref_by_name" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + network = "${google_compute_network.container_network.name}" +}`, acctest.RandString(10), acctest.RandString(10), acctest.RandString(10)) +} + +func testAccContainerCluster_backendRef() string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "my-backend-service" { + name = "terraform-test-%s" + port_name = "http" + protocol = "HTTP" + + backend { + group = "${element(google_container_cluster.primary.instance_group_urls, 1)}" + } + + health_checks = ["${google_compute_http_health_check.default.self_link}"] +} + +resource "google_compute_http_health_check" "default" { + name = "terraform-test-%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_container_cluster" "primary" { + name = "terraform-test-%s" + zone = "us-central1-a" + initial_node_count = 3 + + additional_zones = [ + "us-central1-b", + "us-central1-c", + ] + + node_config { + oauth_scopes = [ + "https://www.googleapis.com/auth/compute", + 
"https://www.googleapis.com/auth/devstorage.read_only", + "https://www.googleapis.com/auth/logging.write", + "https://www.googleapis.com/auth/monitoring", + ] + } +} +`, acctest.RandString(10), acctest.RandString(10), acctest.RandString(10)) +} + +func testAccContainerCluster_withLogging(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_logging" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + logging_service = "logging.googleapis.com" +}`, clusterName) +} + +func testAccContainerCluster_updateLogging(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_logging" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + logging_service = "none" +}`, clusterName) +} + +func testAccContainerCluster_withMonitoring(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_monitoring" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + monitoring_service = "monitoring.googleapis.com" +}`, clusterName) +} + +func testAccContainerCluster_updateMonitoring(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_monitoring" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + monitoring_service = "none" +}`, clusterName) +} + +func testAccContainerCluster_withNodePoolBasic(cluster, nodePool string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_node_pool" { + name = "%s" + zone = "us-central1-a" + + node_pool { + name = "%s" + initial_node_count = 2 + } +}`, cluster, nodePool) +} + +func testAccContainerCluster_withNodePoolAdditionalZones(cluster, nodePool string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_node_pool" { + name = "%s" + zone = "us-central1-a" + + additional_zones = [ + "us-central1-b", + "us-central1-c" + ] + + node_pool { + 
name = "%s" + node_count = 2 + } +}`, cluster, nodePool) +} + +func testAccContainerCluster_withNodePoolResize(cluster, nodePool string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_node_pool" { + name = "%s" + zone = "us-central1-a" + + additional_zones = [ + "us-central1-b", + "us-central1-c" + ] + + node_pool { + name = "%s" + node_count = 3 + } +}`, cluster, nodePool) +} + +func testAccContainerCluster_withNodePoolAutoscaling(cluster, np string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_node_pool" { + name = "%s" + zone = "us-central1-a" + + node_pool { + name = "%s" + initial_node_count = 2 + autoscaling { + min_node_count = 1 + max_node_count = 3 + } + } +}`, cluster, np) +} + +func testAccContainerCluster_withNodePoolUpdateAutoscaling(cluster, np string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_node_pool" { + name = "%s" + zone = "us-central1-a" + + node_pool { + name = "%s" + initial_node_count = 2 + autoscaling { + min_node_count = 1 + max_node_count = 5 + } + } +}`, cluster, np) +} + +func testAccContainerCluster_withNodePoolNamePrefix() string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_node_pool_name_prefix" { + name = "tf-cluster-nodepool-test-%s" + zone = "us-central1-a" + + node_pool { + name_prefix = "tf-np-test" + node_count = 2 + } +}`, acctest.RandString(10)) +} + +func testAccContainerCluster_withNodePoolMultiple() string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_node_pool_multiple" { + name = "tf-cluster-nodepool-test-%s" + zone = "us-central1-a" + + node_pool { + name = "tf-cluster-nodepool-test-%s" + node_count = 2 + } + + node_pool { + name = "tf-cluster-nodepool-test-%s" + node_count = 3 + } +}`, acctest.RandString(10), acctest.RandString(10), acctest.RandString(10)) +} + +func testAccContainerCluster_withNodePoolConflictingNameFields() string { + return fmt.Sprintf(` +resource 
"google_container_cluster" "with_node_pool_multiple" { + name = "tf-cluster-nodepool-test-%s" + zone = "us-central1-a" + + node_pool { + # ERROR: name and name_prefix cannot be both specified + name = "tf-cluster-nodepool-test-%s" + name_prefix = "tf-cluster-nodepool-test-" + node_count = 1 + } +}`, acctest.RandString(10), acctest.RandString(10)) +} + +func testAccContainerCluster_withNodePoolNodeConfig() string { + testId := acctest.RandString(10) + return fmt.Sprintf(` +resource "google_container_cluster" "with_node_pool_node_config" { + name = "tf-cluster-nodepool-test-%s" + zone = "us-central1-a" + node_pool { + name = "tf-cluster-nodepool-test-%s" + node_count = 2 + node_config { + machine_type = "n1-standard-1" + disk_size_gb = 15 + local_ssd_count = 1 + oauth_scopes = [ + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/devstorage.read_only", + "https://www.googleapis.com/auth/logging.write", + "https://www.googleapis.com/auth/monitoring" + ] + service_account = "default" + metadata { + foo = "bar" + } + image_type = "COS" + labels { + foo = "bar" + } + tags = ["foo", "bar"] + } + } + +} +`, testId, testId) +} + +func testAccContainerCluster_withDefaultNodePoolRemoved() string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_default_node_pool_removed" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + remove_default_node_pool = true +} +`, acctest.RandString(10)) +} + +func testAccContainerCluster_withMaintenanceWindow(clusterName string, startTime string) string { + maintenancePolicy := "" + if len(startTime) > 0 { + maintenancePolicy = fmt.Sprintf(` + maintenance_policy { + daily_maintenance_window { + start_time = "%s" + } + }`, startTime) + } + + return fmt.Sprintf(` +resource "google_container_cluster" "with_maintenance_window" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + %s +}`, clusterName, maintenancePolicy) +} + +func 
testAccContainerCluster_withIPAllocationPolicy_existingSecondaryRanges(cluster string) string { + return fmt.Sprintf(` +resource "google_compute_network" "container_network" { + name = "container-net-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "container_subnetwork" { + name = "${google_compute_network.container_network.name}" + network = "${google_compute_network.container_network.name}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + + secondary_ip_range { + range_name = "pods" + ip_cidr_range = "10.1.0.0/16" + } + secondary_ip_range { + range_name = "services" + ip_cidr_range = "10.2.0.0/20" + } +} + +resource "google_container_cluster" "with_ip_allocation_policy" { + name = "%s" + zone = "us-central1-a" + + network = "${google_compute_network.container_network.name}" + subnetwork = "${google_compute_subnetwork.container_subnetwork.name}" + + initial_node_count = 1 + ip_allocation_policy { + cluster_secondary_range_name = "pods" + services_secondary_range_name = "services" + } +}`, cluster, cluster) +} + +func testAccContainerCluster_withIPAllocationPolicy_specificIPRanges(cluster string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_ip_allocation_policy" { + name = "%s" + zone = "us-central1-a" + + initial_node_count = 1 + ip_allocation_policy { + cluster_ipv4_cidr_block = "10.90.0.0/19" + services_ipv4_cidr_block = "10.40.0.0/19" + } +}`, cluster) +} + +func testAccContainerCluster_withIPAllocationPolicy_specificSizes(cluster string) string { + return fmt.Sprintf(` +resource "google_compute_network" "container_network" { + name = "container-net-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "container_subnetwork" { + name = "${google_compute_network.container_network.name}" + network = "${google_compute_network.container_network.name}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" +} + +resource "google_container_cluster" 
"with_ip_allocation_policy" { + name = "%s" + zone = "us-central1-a" + + network = "${google_compute_network.container_network.name}" + subnetwork = "${google_compute_subnetwork.container_subnetwork.name}" + + initial_node_count = 1 + ip_allocation_policy { + cluster_ipv4_cidr_block = "/16" + services_ipv4_cidr_block = "/22" + } +}`, cluster, cluster) +} + +func testAccContainerCluster_withIPAllocationPolicy_createSubnetwork(cluster string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_ip_allocation_policy" { + name = "%s" + zone = "us-central1-a" + + initial_node_count = 1 + ip_allocation_policy { + create_subnetwork = true + } +}`, cluster) +} + +func testAccContainerCluster_withPodSecurityPolicy(clusterName string, enabled bool) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_pod_security_policy" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + pod_security_policy_config { + enabled = %v + } +}`, clusterName, enabled) +} + +func testAccContainerCluster_withPrivateCluster(clusterName string) string { + return fmt.Sprintf(` +resource "google_compute_network" "container_network" { + name = "container-net-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "container_subnetwork" { + name = "${google_compute_network.container_network.name}" + network = "${google_compute_network.container_network.name}" + ip_cidr_range = "10.0.36.0/24" + region = "us-central1" + private_ip_google_access = true + + secondary_ip_range { + range_name = "pod" + ip_cidr_range = "10.0.0.0/19" + } + + secondary_ip_range { + range_name = "svc" + ip_cidr_range = "10.0.32.0/22" + } +} + +resource "google_container_cluster" "with_private_cluster" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + network = "${google_compute_network.container_network.name}" + subnetwork = "${google_compute_subnetwork.container_subnetwork.name}" + + private_cluster 
= true + master_ipv4_cidr_block = "10.42.0.0/28" + master_authorized_networks_config { cidr_blocks = [] } + ip_allocation_policy { + cluster_secondary_range_name = "${google_compute_subnetwork.container_subnetwork.secondary_ip_range.0.range_name}" + services_secondary_range_name = "${google_compute_subnetwork.container_subnetwork.secondary_ip_range.1.range_name}" + } +}`, clusterName, clusterName) +} + +func testAccContainerCluster_withPrivateClusterConfig(clusterName string) string { + return fmt.Sprintf(` +resource "google_compute_network" "container_network" { + name = "container-net-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "container_subnetwork" { + name = "${google_compute_network.container_network.name}" + network = "${google_compute_network.container_network.name}" + ip_cidr_range = "10.0.36.0/24" + region = "us-central1" + private_ip_google_access = true + + secondary_ip_range { + range_name = "pod" + ip_cidr_range = "10.0.0.0/19" + } + + secondary_ip_range { + range_name = "svc" + ip_cidr_range = "10.0.32.0/22" + } +} + +resource "google_container_cluster" "with_private_cluster" { + name = "cluster-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + + network = "${google_compute_network.container_network.name}" + subnetwork = "${google_compute_subnetwork.container_subnetwork.name}" + + private_cluster_config { + enable_private_endpoint = true + enable_private_nodes = true + master_ipv4_cidr_block = "10.42.0.0/28" + } + master_authorized_networks_config { cidr_blocks = [] } + ip_allocation_policy { + cluster_secondary_range_name = "${google_compute_subnetwork.container_subnetwork.secondary_ip_range.0.range_name}" + services_secondary_range_name = "${google_compute_subnetwork.container_subnetwork.secondary_ip_range.1.range_name}" + } +}`, clusterName, clusterName) +} + +func testAccContainerCluster_sharedVpc(org, billingId, projectName, name string) string { + return fmt.Sprintf(` +resource "google_project" 
"host_project" { + name = "Test Project XPN Host" + project_id = "%s-host" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "host_project" { + project = "${google_project.host_project.project_id}" + service = "container.googleapis.com" +} + +resource "google_compute_shared_vpc_host_project" "host_project" { + project = "${google_project_service.host_project.project}" +} + +resource "google_project" "service_project" { + name = "Test Project XPN Service" + project_id = "%s-service" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "service_project" { + project = "${google_project.service_project.project_id}" + service = "container.googleapis.com" +} + +resource "google_compute_shared_vpc_service_project" "service_project" { + host_project = "${google_compute_shared_vpc_host_project.host_project.project}" + service_project = "${google_project_service.service_project.project}" +} + +resource "google_project_iam_member" "host_service_agent" { + project = "${google_project_service.host_project.project}" + role = "roles/container.hostServiceAgentUser" + member = "serviceAccount:service-${google_project.service_project.number}@container-engine-robot.iam.gserviceaccount.com" + + depends_on = ["google_project_service.service_project"] +} + +resource "google_compute_subnetwork_iam_member" "service_network_cloud_services" { + project = "${google_compute_shared_vpc_host_project.host_project.project}" + subnetwork = "${google_compute_subnetwork.shared_subnetwork.name}" + role = "roles/compute.networkUser" + member = "serviceAccount:${google_project.service_project.number}@cloudservices.gserviceaccount.com" +} + +resource "google_compute_subnetwork_iam_member" "service_network_gke_user" { + project = "${google_compute_shared_vpc_host_project.host_project.project}" + subnetwork = "${google_compute_subnetwork.shared_subnetwork.name}" + role = "roles/compute.networkUser" + member = 
"serviceAccount:service-${google_project.service_project.number}@container-engine-robot.iam.gserviceaccount.com" +} + +resource "google_compute_network" "shared_network" { + name = "test-%s" + project = "${google_compute_shared_vpc_host_project.host_project.project}" + + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "shared_subnetwork" { + name = "test-%s" + ip_cidr_range = "10.0.0.0/16" + region = "us-central1" + network = "${google_compute_network.shared_network.self_link}" + project = "${google_compute_shared_vpc_host_project.host_project.project}" + + secondary_ip_range { + range_name = "pods" + ip_cidr_range = "10.1.0.0/16" + } + + secondary_ip_range { + range_name = "services" + ip_cidr_range = "10.2.0.0/20" + } +} + +resource "google_container_cluster" "shared_vpc_cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + project = "${google_compute_shared_vpc_service_project.service_project.service_project}" + + network = "${google_compute_network.shared_network.self_link}" + subnetwork = "${google_compute_subnetwork.shared_subnetwork.self_link}" + + ip_allocation_policy { + cluster_secondary_range_name = "${google_compute_subnetwork.shared_subnetwork.secondary_ip_range.0.range_name}" + services_secondary_range_name = "${google_compute_subnetwork.shared_subnetwork.secondary_ip_range.1.range_name}" + } + + depends_on = [ + "google_project_iam_member.host_service_agent", + "google_compute_subnetwork_iam_member.service_network_cloud_services", + "google_compute_subnetwork_iam_member.service_network_gke_user" + ] +}`, projectName, org, billingId, projectName, org, billingId, acctest.RandString(10), acctest.RandString(10), name) +} + +func testAccContainerCluster_withoutResourceLabels(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_resource_labels" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 +} +`, clusterName) +} + +func 
testAccContainerCluster_withResourceLabels(clusterName string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_resource_labels" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + + resource_labels { + created-by = "terraform" + } +} +`, clusterName) +} + +func testAccContainerCluster_withBinaryAuthorization(clusterName string, enabled bool) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_binary_authorization" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + + enable_binary_authorization = %v +} +`, clusterName, enabled) +} diff --git a/provider/terraform/tests/resource_container_node_pool_migrate_test.go b/provider/terraform/tests/resource_container_node_pool_migrate_test.go new file mode 100644 index 000000000000..76f807e1c2a8 --- /dev/null +++ b/provider/terraform/tests/resource_container_node_pool_migrate_test.go @@ -0,0 +1,65 @@ +package google + +import ( + "github.com/hashicorp/terraform/terraform" + "testing" +) + +func TestContainerNodePoolMigrateState(t *testing.T) { + cases := map[string]struct { + StateVersion int + Attributes map[string]string + ExpectedId string + Meta interface{} + }{ + "update id from name to zone/cluster/name": { + StateVersion: 0, + Attributes: map[string]string{ + "name": "node-pool-1", + "zone": "us-central1-c", + "cluster": "cluster-1", + }, + ExpectedId: "us-central1-c/cluster-1/node-pool-1", + }, + } + + for tn, tc := range cases { + is := &terraform.InstanceState{ + ID: tc.Attributes["name"], + Attributes: tc.Attributes, + } + + is, err := resourceContainerNodePoolMigrateState(tc.StateVersion, is, tc.Meta) + + if err != nil { + t.Fatalf("bad: %s, err: %#v", tn, err) + } + + if is.ID != tc.ExpectedId { + t.Fatalf("Id should be set to `%s` but is `%s`", tc.ExpectedId, is.ID) + } + } +} + +func TestContainerNodePoolMigrateState_empty(t *testing.T) { + var is *terraform.InstanceState + var meta *Config + + // should handle nil + is, err := 
resourceContainerNodePoolMigrateState(0, is, meta) + + if err != nil { + t.Fatalf("err: %#v", err) + } + if is != nil { + t.Fatalf("expected nil instancestate, got: %#v", is) + } + + // should handle non-nil but empty + is = &terraform.InstanceState{} + is, err = resourceContainerNodePoolMigrateState(0, is, meta) + + if err != nil { + t.Fatalf("err: %#v", err) + } +} diff --git a/provider/terraform/tests/resource_container_node_pool_test.go b/provider/terraform/tests/resource_container_node_pool_test.go new file mode 100644 index 000000000000..9a4bb505cb52 --- /dev/null +++ b/provider/terraform/tests/resource_container_node_pool_test.go @@ -0,0 +1,989 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccContainerNodePool_basic(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + np := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccContainerNodePool_basic(cluster, np), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerNodePool_maxPodsPerNode(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + np := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccContainerNodePool_maxPodsPerNode(cluster, np), + }, + 
resource.TestStep{ + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerNodePool_namePrefix(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccContainerNodePool_namePrefix(cluster, "tf-np-"), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name_prefix"}, + }, + }, + }) +} + +func TestAccContainerNodePool_noName(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccContainerNodePool_noName(cluster), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerNodePool_withNodeConfig(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + nodePool := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccContainerNodePool_withNodeConfig(cluster, nodePool), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np_with_node_config", + ImportState: true, + ImportStateVerify: true, + // autoscaling.# = 0 is 
equivalent to no autoscaling at all, + // but will still cause an import diff + ImportStateVerifyIgnore: []string{"autoscaling.#"}, + }, + resource.TestStep{ + Config: testAccContainerNodePool_withNodeConfigUpdate(cluster, nodePool), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np_with_node_config", + ImportState: true, + ImportStateVerify: true, + // autoscaling.# = 0 is equivalent to no autoscaling at all, + // but will still cause an import diff + ImportStateVerifyIgnore: []string{"autoscaling.#"}, + }, + }, + }) +} + +func TestAccContainerNodePool_withNodeConfigTaints(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccContainerNodePool_withNodeConfigTaints(), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_node_pool.np_with_node_config", "node_config.0.taint.#", "2"), + ), + }, + // Don't include an import step because beta features can't yet be imported. + // Once taints are in GA, consider merging this test with the _withNodeConfig test. 
+ }, + }) +} + +func TestAccContainerNodePool_withWorkloadMetadataConfig(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerNodePool_withWorkloadMetadataConfig(), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_node_pool.with_workload_metadata_config", + "node_config.0.workload_metadata_config.0.node_metadata", "SECURE"), + ), + }, + { + ResourceName: "google_container_node_pool.with_workload_metadata_config", + ImportState: true, + ImportStateVerify: true, + // Import always uses the v1 API, so beta features don't get imported. + ImportStateVerifyIgnore: []string{ + "node_config.0.workload_metadata_config.#", + "node_config.0.workload_metadata_config.0.node_metadata", + }, + }, + }, + }) +} + +func TestAccContainerNodePool_withGPU(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccContainerNodePool_withGPU(), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np_with_gpu", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerNodePool_withManagement(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + nodePool := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + management := ` + management { + auto_repair = "true" + auto_upgrade = "true" + }` + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: 
 testAccContainerNodePool_withManagement(cluster, nodePool, ""), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_container_node_pool.np_with_management", "management.#", "1"), + resource.TestCheckResourceAttr( + "google_container_node_pool.np_with_management", "management.0.auto_repair", "false"), + resource.TestCheckResourceAttr( + "google_container_node_pool.np_with_management", "management.0.auto_upgrade", "false"), + ), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np_with_management", + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + Config: testAccContainerNodePool_withManagement(cluster, nodePool, management), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_container_node_pool.np_with_management", "management.#", "1"), + resource.TestCheckResourceAttr( + "google_container_node_pool.np_with_management", "management.0.auto_repair", "true"), + resource.TestCheckResourceAttr( + "google_container_node_pool.np_with_management", "management.0.auto_upgrade", "true"), + ), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np_with_management", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerNodePool_withNodeConfigScopeAlias(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccContainerNodePool_withNodeConfigScopeAlias(), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np_with_node_config_scope_alias", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +//This test exists to validate a regional node pool *and* an update to it. 
+func TestAccContainerNodePool_regionalAutoscaling(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + np := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccContainerNodePool_regionalAutoscaling(cluster, np), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_node_pool.np", "autoscaling.0.min_node_count", "1"), + resource.TestCheckResourceAttr("google_container_node_pool.np", "autoscaling.0.max_node_count", "3"), + ), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + Config: testAccContainerNodePool_updateAutoscaling(cluster, np), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_node_pool.np", "autoscaling.0.min_node_count", "0"), + resource.TestCheckResourceAttr("google_container_node_pool.np", "autoscaling.0.max_node_count", "5"), + ), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + Config: testAccContainerNodePool_basic(cluster, np), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckNoResourceAttr("google_container_node_pool.np", "autoscaling.0.min_node_count"), + resource.TestCheckNoResourceAttr("google_container_node_pool.np", "autoscaling.0.max_node_count"), + ), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + // autoscaling.# = 0 is equivalent to no autoscaling at all, + // but will still cause an import diff + ImportStateVerifyIgnore: []string{"autoscaling.#"}, + }, + }, + }) +} + +func 
TestAccContainerNodePool_autoscaling(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + np := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccContainerNodePool_autoscaling(cluster, np), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_node_pool.np", "autoscaling.0.min_node_count", "1"), + resource.TestCheckResourceAttr("google_container_node_pool.np", "autoscaling.0.max_node_count", "3"), + ), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + Config: testAccContainerNodePool_updateAutoscaling(cluster, np), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_node_pool.np", "autoscaling.0.min_node_count", "0"), + resource.TestCheckResourceAttr("google_container_node_pool.np", "autoscaling.0.max_node_count", "5"), + ), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + Config: testAccContainerNodePool_basic(cluster, np), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckNoResourceAttr("google_container_node_pool.np", "autoscaling.0.min_node_count"), + resource.TestCheckNoResourceAttr("google_container_node_pool.np", "autoscaling.0.max_node_count"), + ), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + // autoscaling.# = 0 is equivalent to no autoscaling at all, + // but will still cause an import diff + ImportStateVerifyIgnore: []string{"autoscaling.#"}, + }, + }, + }) +} + +func 
TestAccContainerNodePool_resize(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + np := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerNodePool_additionalZones(cluster, np), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_node_pool.np", "node_count", "2"), + ), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerNodePool_resize(cluster, np), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_container_node_pool.np", "node_count", "3"), + ), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccContainerNodePool_version(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + np := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerClusterDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerNodePool_version(cluster, np), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerNodePool_updateVersion(cluster, np), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccContainerNodePool_version(cluster, np), + }, + { + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func 
testAccCheckContainerNodePoolDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_container_node_pool" { + continue + } + + attributes := rs.Primary.Attributes + zone := attributes["zone"] + + var err error + if zone != "" { + _, err = config.clientContainer.Projects.Zones.Clusters.NodePools.Get( + config.Project, attributes["zone"], attributes["cluster"], attributes["name"]).Do() + } else { + name := fmt.Sprintf( + "projects/%s/locations/%s/clusters/%s/nodePools/%s", + config.Project, + attributes["region"], + attributes["cluster"], + attributes["name"], + ) + _, err = config.clientContainerBeta.Projects.Locations.Clusters.NodePools.Get(name).Do() + } + + if err == nil { + return fmt.Errorf("NodePool still exists") + } + } + + return nil +} + +func TestAccContainerNodePool_regionalClusters(t *testing.T) { + t.Parallel() + + cluster := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + np := fmt.Sprintf("tf-nodepool-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerNodePoolDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccContainerNodePool_regionalClusters(cluster, np), + }, + resource.TestStep{ + ResourceName: "google_container_node_pool.np", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccContainerNodePool_basic(cluster, np string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 3 +} + +resource "google_container_node_pool" "np" { + name = "%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 2 +}`, cluster, np) +} + +func testAccContainerNodePool_maxPodsPerNode(cluster, np string) string { + return fmt.Sprintf(` 
+resource "google_compute_network" "container_network" { + name = "container-net-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "container_subnetwork" { + name = "${google_compute_network.container_network.name}" + network = "${google_compute_network.container_network.name}" + ip_cidr_range = "10.0.36.0/24" + region = "us-central1" + private_ip_google_access = true + + secondary_ip_range { + range_name = "pod" + ip_cidr_range = "10.0.0.0/19" + } + + secondary_ip_range { + range_name = "svc" + ip_cidr_range = "10.0.32.0/22" + } +} + +resource "google_container_cluster" "cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 3 + + network = "${google_compute_network.container_network.name}" + subnetwork = "${google_compute_subnetwork.container_subnetwork.name}" + private_cluster = true + master_ipv4_cidr_block = "10.42.0.0/28" + ip_allocation_policy { + cluster_secondary_range_name = "${google_compute_subnetwork.container_subnetwork.secondary_ip_range.0.range_name}" + services_secondary_range_name = "${google_compute_subnetwork.container_subnetwork.secondary_ip_range.1.range_name}" + } + master_authorized_networks_config { + cidr_blocks = [] + } +} + +resource "google_container_node_pool" "np" { + name = "%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + max_pods_per_node = 30 + initial_node_count = 2 +}`, cluster, cluster, np) +} + +func testAccContainerNodePool_regionalClusters(cluster, np string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "%s" + region = "us-central1" + initial_node_count = 3 +} + +resource "google_container_node_pool" "np" { + name = "%s" + cluster = "${google_container_cluster.cluster.name}" + region = "us-central1" + initial_node_count = 2 +}`, cluster, np) +} + +func testAccContainerNodePool_namePrefix(cluster, np string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = 
"%s" + zone = "us-central1-a" + initial_node_count = 3 +} + +resource "google_container_node_pool" "np" { + name_prefix = "%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 2 +}`, cluster, np) +} + +func testAccContainerNodePool_noName(cluster string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 3 +} + +resource "google_container_node_pool" "np" { + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 2 +}`, cluster) +} + +func testAccContainerNodePool_regionalAutoscaling(cluster, np string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "%s" + region = "us-central1" + initial_node_count = 3 +} + +resource "google_container_node_pool" "np" { + name = "%s" + region = "us-central1" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 2 + autoscaling { + min_node_count = 1 + max_node_count = 3 + } +}`, cluster, np) +} + +func testAccContainerNodePool_autoscaling(cluster, np string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 3 +} + +resource "google_container_node_pool" "np" { + name = "%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 2 + autoscaling { + min_node_count = 1 + max_node_count = 3 + } +}`, cluster, np) +} + +func testAccContainerNodePool_updateAutoscaling(cluster, np string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 3 +} + +resource "google_container_node_pool" "np" { + name = "%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 2 + autoscaling { + min_node_count = 0 + 
max_node_count = 5 + } +}`, cluster, np) +} + +func testAccContainerNodePool_additionalZones(cluster, nodePool string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + + additional_zones = [ + "us-central1-b", + "us-central1-c" + ] +} + +resource "google_container_node_pool" "np" { + name = "%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + node_count = 2 +}`, cluster, nodePool) +} + +func testAccContainerNodePool_resize(cluster, nodePool string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + + additional_zones = [ + "us-central1-b", + "us-central1-c" + ] +} + +resource "google_container_node_pool" "np" { + name = "%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + node_count = 3 +}`, cluster, nodePool) +} + +func testAccContainerNodePool_withManagement(cluster, nodePool, management string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 +} + +resource "google_container_node_pool" "np_with_management" { + name = "%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 1 + + %s + + node_config { + machine_type = "g1-small" + disk_size_gb = 10 + oauth_scopes = ["compute-rw", "storage-ro", "logging-write", "monitoring"] + } +}`, cluster, nodePool, management) +} + +func testAccContainerNodePool_withNodeConfig(cluster, nodePool string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 +} +resource "google_container_node_pool" "np_with_node_config" { + name = "%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 1 
+ node_config { + machine_type = "g1-small" + disk_size_gb = 10 + oauth_scopes = [ + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/devstorage.read_only", + "https://www.googleapis.com/auth/logging.write", + "https://www.googleapis.com/auth/monitoring" + ] + preemptible = true + min_cpu_platform = "Intel Broadwell" + + // Updatable fields + image_type = "COS" + } +}`, cluster, nodePool) +} + +func testAccContainerNodePool_withNodeConfigUpdate(cluster, nodePool string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 +} +resource "google_container_node_pool" "np_with_node_config" { + name = "%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 1 + node_config { + machine_type = "g1-small" + disk_size_gb = 10 + oauth_scopes = [ + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/devstorage.read_only", + "https://www.googleapis.com/auth/logging.write", + "https://www.googleapis.com/auth/monitoring" + ] + preemptible = true + min_cpu_platform = "Intel Broadwell" + + // Updatable fields + image_type = "UBUNTU" + } +}`, cluster, nodePool) +} + +func testAccContainerNodePool_withNodeConfigTaints() string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "tf-cluster-nodepool-test-%s" + zone = "us-central1-a" + initial_node_count = 1 +} +resource "google_container_node_pool" "np_with_node_config" { + name = "tf-nodepool-test-%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 1 + node_config { + taint { + key = "taint_key" + value = "taint_value" + effect = "PREFER_NO_SCHEDULE" + } + taint { + key = "taint_key2" + value = "taint_value2" + effect = "NO_EXECUTE" + } + } +}`, acctest.RandString(10), acctest.RandString(10)) +} + +func 
testAccContainerNodePool_withWorkloadMetadataConfig() string { + return fmt.Sprintf(` +data "google_container_engine_versions" "central1a" { + zone = "us-central1-a" +} + +resource "google_container_cluster" "cluster" { + name = "tf-cluster-nodepool-test-%s" + zone = "us-central1-a" + initial_node_count = 1 + min_master_version = "${data.google_container_engine_versions.central1a.latest_master_version}" +} + +resource "google_container_node_pool" "with_workload_metadata_config" { + name = "tf-nodepool-test-%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 1 + node_config { + oauth_scopes = [ + "https://www.googleapis.com/auth/logging.write", + "https://www.googleapis.com/auth/monitoring" + ] + + workload_metadata_config { + node_metadata = "SECURE" + } + } +} +`, acctest.RandString(10), acctest.RandString(10)) +} + +func testAccContainerNodePool_withGPU() string { + return fmt.Sprintf(` +data "google_container_engine_versions" "central1c" { + zone = "us-central1-c" +} + +resource "google_container_cluster" "cluster" { + name = "tf-cluster-nodepool-test-%s" + zone = "us-central1-c" + initial_node_count = 1 + node_version = "${data.google_container_engine_versions.central1c.latest_node_version}" + min_master_version = "${data.google_container_engine_versions.central1c.latest_master_version}" +} +resource "google_container_node_pool" "np_with_gpu" { + name = "tf-nodepool-test-%s" + zone = "us-central1-c" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 1 + node_config { + machine_type = "n1-standard-1" + disk_size_gb = 10 + oauth_scopes = [ + "https://www.googleapis.com/auth/devstorage.read_only", + "https://www.googleapis.com/auth/logging.write", + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/service.management.readonly", + "https://www.googleapis.com/auth/servicecontrol", + "https://www.googleapis.com/auth/trace.append" + ] + preemptible = true + 
service_account = "default" + image_type = "COS" + guest_accelerator = [ + { + type = "nvidia-tesla-k80" + count = 1 + } + ] + } +}`, acctest.RandString(10), acctest.RandString(10)) +} + +func testAccContainerNodePool_withNodeConfigScopeAlias() string { + return fmt.Sprintf(` +resource "google_container_cluster" "cluster" { + name = "tf-cluster-nodepool-test-%s" + zone = "us-central1-a" + initial_node_count = 1 +} +resource "google_container_node_pool" "np_with_node_config_scope_alias" { + name = "tf-nodepool-test-%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 1 + node_config { + machine_type = "g1-small" + disk_size_gb = 10 + oauth_scopes = ["compute-rw", "storage-ro", "logging-write", "monitoring"] + } +}`, acctest.RandString(10), acctest.RandString(10)) +} + +func testAccContainerNodePool_version(cluster, np string) string { + return fmt.Sprintf(` +data "google_container_engine_versions" "central1a" { + zone = "us-central1-a" +} + +resource "google_container_cluster" "cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + min_master_version = "${data.google_container_engine_versions.central1a.latest_master_version}" +} + +resource "google_container_node_pool" "np" { + name = "%s" + zone = "us-central1-a" + cluster = "${google_container_cluster.cluster.name}" + initial_node_count = 1 + + version = "${data.google_container_engine_versions.central1a.valid_node_versions.1}" +}`, cluster, np) +} + +func testAccContainerNodePool_updateVersion(cluster, np string) string { + return fmt.Sprintf(` +data "google_container_engine_versions" "central1a" { + zone = "us-central1-a" +} + +resource "google_container_cluster" "cluster" { + name = "%s" + zone = "us-central1-a" + initial_node_count = 1 + min_master_version = "${data.google_container_engine_versions.central1a.latest_master_version}" +} + +resource "google_container_node_pool" "np" { + name = "%s" + zone = "us-central1-a" + cluster = 
"${google_container_cluster.cluster.name}" + initial_node_count = 1 + + version = "${data.google_container_engine_versions.central1a.valid_node_versions.0}" +}`, cluster, np) +} diff --git a/provider/terraform/tests/resource_containeranalysis_note_generated_test.go b/provider/terraform/tests/resource_containeranalysis_note_generated_test.go new file mode 100644 index 000000000000..dc740356a298 --- /dev/null +++ b/provider/terraform/tests/resource_containeranalysis_note_generated_test.go @@ -0,0 +1,57 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" +) + +func TestAccContainerAnalysisNote_containerAnalysisNoteBasicExample(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckContainerAnalysisNoteDestroy, + Steps: []resource.TestStep{ + { + Config: testAccContainerAnalysisNote_containerAnalysisNoteBasicExample(acctest.RandString(10)), + }, + { + ResourceName: "google_container_analysis_note.note", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccContainerAnalysisNote_containerAnalysisNoteBasicExample(val string) string { + return fmt.Sprintf(` +resource "google_container_analysis_note" "note" { + name = "test-attestor-note-%s" + attestation_authority { + hint { + human_readable_name = 
"Attestor Note" + } + } +} +`, val, + ) +} diff --git a/provider/terraform/tests/resource_dataflow_job_test.go b/provider/terraform/tests/resource_dataflow_job_test.go new file mode 100644 index 000000000000..5750893dc2d8 --- /dev/null +++ b/provider/terraform/tests/resource_dataflow_job_test.go @@ -0,0 +1,173 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccDataflowJobCreate(t *testing.T) { + t.Parallel() + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataflowJobDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDataflowJob, + Check: resource.ComposeTestCheckFunc( + testAccDataflowJobExists( + "google_dataflow_job.big_data"), + ), + }, + }, + }) +} + +func TestAccDataflowJobRegionCreate(t *testing.T) { + t.Parallel() + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataflowJobRegionDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDataflowJobRegion, + Check: resource.ComposeTestCheckFunc( + testAccDataflowJobRegionExists( + "google_dataflow_job.big_data"), + ), + }, + }, + }) +} + +func testAccCheckDataflowJobDestroy(s *terraform.State) error { + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_dataflow_job" { + continue + } + + config := testAccProvider.Meta().(*Config) + job, err := config.clientDataflow.Projects.Jobs.Get(config.Project, rs.Primary.ID).Do() + if job != nil { + if _, ok := dataflowTerminalStatesMap[job.CurrentState]; !ok { + return fmt.Errorf("Job still present") + } + } else if err != nil { + return err + } + } + + return nil +} + +func testAccCheckDataflowJobRegionDestroy(s *terraform.State) error { + for _, rs := range 
s.RootModule().Resources { + if rs.Type != "google_dataflow_job" { + continue + } + + config := testAccProvider.Meta().(*Config) + job, err := config.clientDataflow.Projects.Locations.Jobs.Get(config.Project, "us-central1", rs.Primary.ID).Do() + if job != nil { + if _, ok := dataflowTerminalStatesMap[job.CurrentState]; !ok { + return fmt.Errorf("Job still present") + } + } else if err != nil { + return err + } + } + + return nil +} + +func testAccDataflowJobExists(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + config := testAccProvider.Meta().(*Config) + _, err := config.clientDataflow.Projects.Jobs.Get(config.Project, rs.Primary.ID).Do() + if err != nil { + return fmt.Errorf("Job does not exist") + } + + return nil + } +} + +func testAccDataflowJobRegionExists(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + config := testAccProvider.Meta().(*Config) + _, err := config.clientDataflow.Projects.Locations.Jobs.Get(config.Project, "us-central1", rs.Primary.ID).Do() + if err != nil { + return fmt.Errorf("Job does not exist") + } + + return nil + } +} + +var testAccDataflowJob = fmt.Sprintf(` +resource "google_storage_bucket" "temp" { + name = "dfjob-test-%s-temp" + + force_destroy = true +} + +resource "google_dataflow_job" "big_data" { + name = "dfjob-test-%s" + + template_gcs_path = "gs://dataflow-templates/wordcount/template_file" + temp_gcs_location = "${google_storage_bucket.temp.url}" + + parameters { + inputFile = "gs://dataflow-samples/shakespeare/kinglear.txt" + output = "${google_storage_bucket.temp.url}/output" + } + zone = "us-central1-f" + project = "%s" + + on_delete = 
"cancel" +}`, acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv()) + +var testAccDataflowJobRegion = fmt.Sprintf(` +resource "google_storage_bucket" "temp" { + name = "dfjob-test-%s-temp" + + force_destroy = true +} + +resource "google_dataflow_job" "big_data" { + name = "dfjob-test-%s" + + template_gcs_path = "gs://dataflow-templates/wordcount/template_file" + temp_gcs_location = "${google_storage_bucket.temp.url}" + + parameters { + inputFile = "gs://dataflow-samples/shakespeare/kinglear.txt" + output = "${google_storage_bucket.temp.url}/output" + } + region = "us-central1" + zone = "us-central1-c" + project = "%s" + + on_delete = "cancel" +}`, acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv()) diff --git a/provider/terraform/tests/resource_dataproc_cluster_test.go b/provider/terraform/tests/resource_dataproc_cluster_test.go new file mode 100644 index 000000000000..089e9febe24d --- /dev/null +++ b/provider/terraform/tests/resource_dataproc_cluster_test.go @@ -0,0 +1,1062 @@ +package google + +import ( + "fmt" + "net/http" + "reflect" + "regexp" + "strconv" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + + "google.golang.org/api/dataproc/v1" + "google.golang.org/api/googleapi" +) + +func TestExtractInitTimeout(t *testing.T) { + t.Parallel() + + actual, err := extractInitTimeout("500s") + expected := 500 + if err != nil { + t.Fatalf("Expected %d, but got error %v", expected, err) + } + if actual != expected { + t.Fatalf("Expected %d, but got %d", expected, actual) + } +} + +func TestExtractInitTimeout_nonSeconds(t *testing.T) { + t.Parallel() + + actual, err := extractInitTimeout("5m") + expected := 300 + if err != nil { + t.Fatalf("Expected %d, but got error %v", expected, err) + } + if actual != expected { + t.Fatalf("Expected %d, but got %d", expected, actual) + } +} + +func TestExtractInitTimeout_empty(t 
*testing.T) { + t.Parallel() + + _, err := extractInitTimeout("") + expected := "time: invalid duration" + if err != nil && err.Error() != expected { + return + } + t.Fatalf("Expected an error with message '%s', but got %v", expected, err.Error()) +} + +func TestAccDataprocCluster_missingZoneGlobalRegion1(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckDataproc_missingZoneGlobalRegion1(rnd), + ExpectError: regexp.MustCompile("zone is mandatory when region is set to 'global'"), + }, + }, + }) +} + +func TestAccDataprocCluster_missingZoneGlobalRegion2(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccCheckDataproc_missingZoneGlobalRegion2(rnd), + ExpectError: regexp.MustCompile("zone is mandatory when region is set to 'global'"), + }, + }, + }) +} + +func TestAccDataprocCluster_basic(t *testing.T) { + t.Parallel() + + var cluster dataproc.Cluster + rnd := acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocClusterDestroy(), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_basic(rnd), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists("google_dataproc_cluster.basic", &cluster), + + // Default behaviour is for Dataproc to autogen or autodiscover a config bucket + resource.TestCheckResourceAttrSet("google_dataproc_cluster.basic", "cluster_config.0.bucket"), + + // Default behavior is for Dataproc to not use only internal IP addresses + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", "cluster_config.0.gce_cluster_config.0.internal_ip_only", 
"false"), + + // Expect 1 master instances with computed values + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", "cluster_config.0.master_config.#", "1"), + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", "cluster_config.0.master_config.0.num_instances", "1"), + resource.TestCheckResourceAttrSet("google_dataproc_cluster.basic", "cluster_config.0.master_config.0.disk_config.0.boot_disk_size_gb"), + resource.TestCheckResourceAttrSet("google_dataproc_cluster.basic", "cluster_config.0.master_config.0.disk_config.0.num_local_ssds"), + resource.TestCheckResourceAttrSet("google_dataproc_cluster.basic", "cluster_config.0.master_config.0.disk_config.0.boot_disk_type"), + resource.TestCheckResourceAttrSet("google_dataproc_cluster.basic", "cluster_config.0.master_config.0.machine_type"), + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", "cluster_config.0.master_config.0.instance_names.#", "1"), + + // Expect 2 worker instances with computed values + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", "cluster_config.0.worker_config.#", "1"), + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", "cluster_config.0.worker_config.0.num_instances", "2"), + resource.TestCheckResourceAttrSet("google_dataproc_cluster.basic", "cluster_config.0.worker_config.0.disk_config.0.boot_disk_size_gb"), + resource.TestCheckResourceAttrSet("google_dataproc_cluster.basic", "cluster_config.0.worker_config.0.disk_config.0.num_local_ssds"), + resource.TestCheckResourceAttrSet("google_dataproc_cluster.basic", "cluster_config.0.worker_config.0.disk_config.0.boot_disk_type"), + resource.TestCheckResourceAttrSet("google_dataproc_cluster.basic", "cluster_config.0.worker_config.0.machine_type"), + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", "cluster_config.0.worker_config.0.instance_names.#", "2"), + + // Expect 0 preemptible worker instances + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", 
"cluster_config.0.preemptible_worker_config.#", "1"), + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", "cluster_config.0.preemptible_worker_config.0.num_instances", "0"), + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", "cluster_config.0.preemptible_worker_config.0.instance_names.#", "0"), + ), + }, + }, + }) +} + +func TestAccDataprocCluster_withInternalIpOnlyTrue(t *testing.T) { + t.Parallel() + + var cluster dataproc.Cluster + rnd := acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocClusterDestroy(), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_withInternalIpOnlyTrue(rnd), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists("google_dataproc_cluster.basic", &cluster), + + // Testing behavior for Dataproc to use only internal IP addresses + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", "cluster_config.0.gce_cluster_config.0.internal_ip_only", "true"), + ), + }, + }, + }) +} + +func TestAccDataprocCluster_withMetadata(t *testing.T) { + t.Parallel() + + var cluster dataproc.Cluster + rnd := acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocClusterDestroy(), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_withMetadata(rnd), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists("google_dataproc_cluster.basic", &cluster), + + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", "cluster_config.0.gce_cluster_config.0.metadata.foo", "bar"), + resource.TestCheckResourceAttr("google_dataproc_cluster.basic", "cluster_config.0.gce_cluster_config.0.metadata.baz", "qux"), + ), + }, + }, + }) +} + +func TestAccDataprocCluster_singleNodeCluster(t *testing.T) { + t.Parallel() + + rnd := 
acctest.RandString(10) + var cluster dataproc.Cluster + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocClusterDestroy(), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_singleNodeCluster(rnd), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists("google_dataproc_cluster.single_node_cluster", &cluster), + resource.TestCheckResourceAttr("google_dataproc_cluster.single_node_cluster", "cluster_config.0.master_config.0.num_instances", "1"), + resource.TestCheckResourceAttr("google_dataproc_cluster.single_node_cluster", "cluster_config.0.worker_config.0.num_instances", "0"), + + // We set the "dataproc:dataproc.allow.zero.workers" override property. + // GCP should populate the 'properties' value with this value, as well as many others + resource.TestCheckResourceAttrSet("google_dataproc_cluster.single_node_cluster", "cluster_config.0.software_config.0.properties.%"), + ), + }, + }, + }) +} + +func TestAccDataprocCluster_updatable(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(10) + var cluster dataproc.Cluster + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocClusterDestroy(), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_updatable(rnd, 2, 1), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists("google_dataproc_cluster.updatable", &cluster), + resource.TestCheckResourceAttr("google_dataproc_cluster.updatable", "cluster_config.0.master_config.0.num_instances", "1"), + resource.TestCheckResourceAttr("google_dataproc_cluster.updatable", "cluster_config.0.worker_config.0.num_instances", "2"), + resource.TestCheckResourceAttr("google_dataproc_cluster.updatable", "cluster_config.0.preemptible_worker_config.0.num_instances", "1")), + }, + { + Config: 
testAccDataprocCluster_updatable(rnd, 3, 2), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_dataproc_cluster.updatable", "cluster_config.0.master_config.0.num_instances", "1"), + resource.TestCheckResourceAttr("google_dataproc_cluster.updatable", "cluster_config.0.worker_config.0.num_instances", "3"), + resource.TestCheckResourceAttr("google_dataproc_cluster.updatable", "cluster_config.0.preemptible_worker_config.0.num_instances", "2")), + }, + }, + }) +} + +func TestAccDataprocCluster_withStagingBucket(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(10) + var cluster dataproc.Cluster + clusterName := fmt.Sprintf("dproc-cluster-test-%s", rnd) + bucketName := fmt.Sprintf("%s-bucket", clusterName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocClusterDestroy(), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_withStagingBucketAndCluster(clusterName, bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists("google_dataproc_cluster.with_bucket", &cluster), + resource.TestCheckResourceAttr("google_dataproc_cluster.with_bucket", "cluster_config.0.staging_bucket", bucketName), + resource.TestCheckResourceAttr("google_dataproc_cluster.with_bucket", "cluster_config.0.bucket", bucketName)), + }, + { + // Simulate destroy of cluster by removing it from definition, + // but leaving the storage bucket (should not be auto deleted) + Config: testAccDataprocCluster_withStagingBucketOnly(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocStagingBucketExists(bucketName), + ), + }, + }, + }) +} + +func TestAccDataprocCluster_withInitAction(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(10) + var cluster dataproc.Cluster + bucketName := fmt.Sprintf("dproc-cluster-test-%s-init-bucket", rnd) + objectName := "msg.txt" + resource.Test(t, 
resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocClusterDestroy(), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_withInitAction(rnd, bucketName, objectName), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists("google_dataproc_cluster.with_init_action", &cluster), + resource.TestCheckResourceAttr("google_dataproc_cluster.with_init_action", "cluster_config.0.initialization_action.#", "2"), + resource.TestCheckResourceAttr("google_dataproc_cluster.with_init_action", "cluster_config.0.initialization_action.0.timeout_sec", "500"), + testAccCheckDataprocClusterInitActionSucceeded(bucketName, objectName), + ), + }, + }, + }) +} + +func TestAccDataprocCluster_withConfigOverrides(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(10) + var cluster dataproc.Cluster + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocClusterDestroy(), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_withConfigOverrides(rnd), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists("google_dataproc_cluster.with_config_overrides", &cluster), + validateDataprocCluster_withConfigOverrides("google_dataproc_cluster.with_config_overrides", &cluster), + ), + }, + }, + }) +} + +func TestAccDataprocCluster_withServiceAcc(t *testing.T) { + t.Parallel() + + sa := "a" + acctest.RandString(10) + saEmail := fmt.Sprintf("%s@%s.iam.gserviceaccount.com", sa, getTestProjectFromEnv()) + rnd := acctest.RandString(10) + + var cluster dataproc.Cluster + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocClusterDestroy(), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_withServiceAcc(sa, rnd), + Check: resource.ComposeTestCheckFunc( + 
testAccCheckDataprocClusterExists( + "google_dataproc_cluster.with_service_account", &cluster), + testAccCheckDataprocClusterHasServiceScopes(t, &cluster, + "https://www.googleapis.com/auth/cloud.useraccounts.readonly", + "https://www.googleapis.com/auth/devstorage.read_write", + "https://www.googleapis.com/auth/logging.write", + "https://www.googleapis.com/auth/monitoring", + ), + resource.TestCheckResourceAttr("google_dataproc_cluster.with_service_account", "cluster_config.0.gce_cluster_config.0.service_account", saEmail), + ), + }, + }, + }) +} + +func TestAccDataprocCluster_withImageVersion(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(10) + var cluster dataproc.Cluster + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocClusterDestroy(), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_withImageVersion(rnd), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists("google_dataproc_cluster.with_image_version", &cluster), + resource.TestCheckResourceAttr("google_dataproc_cluster.with_image_version", "cluster_config.0.software_config.0.image_version", "1.3.7-deb9"), + ), + }, + }, + }) +} + +func TestAccDataprocCluster_withLabels(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(10) + var cluster dataproc.Cluster + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocClusterDestroy(), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_withLabels(rnd), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocClusterExists("google_dataproc_cluster.with_labels", &cluster), + + // We only provide one, but GCP adds three, so expect 4. This means unfortunately a + // diff will exist unless the user adds these in. An alternative approach would + // be to follow the same approach as properties, i.e. 
split in into labels + // and override_labels + // + // The config is currently configured with ignore_changes = ["labels"] to handle this + // + resource.TestCheckResourceAttr("google_dataproc_cluster.with_labels", "labels.%", "4"), + resource.TestCheckResourceAttr("google_dataproc_cluster.with_labels", "labels.key1", "value1"), + ), + }, + }, + }) +} + +func TestAccDataprocCluster_withNetworkRefs(t *testing.T) { + t.Parallel() + + var c1, c2 dataproc.Cluster + rnd := acctest.RandString(10) + netName := fmt.Sprintf(`dproc-cluster-test-%s-net`, rnd) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocClusterDestroy(), + Steps: []resource.TestStep{ + { + Config: testAccDataprocCluster_withNetworkRefs(rnd, netName), + Check: resource.ComposeTestCheckFunc( + // successful creation of the clusters is good enough to assess it worked + testAccCheckDataprocClusterExists("google_dataproc_cluster.with_net_ref_by_url", &c1), + testAccCheckDataprocClusterExists("google_dataproc_cluster.with_net_ref_by_name", &c2), + ), + }, + }, + }) +} + +func testAccCheckDataprocClusterDestroy() resource.TestCheckFunc { + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_dataproc_cluster" { + continue + } + + if rs.Primary.ID == "" { + return fmt.Errorf("Unable to verify delete of dataproc cluster, ID is empty") + } + + attributes := rs.Primary.Attributes + project, err := getTestProject(rs.Primary, config) + if err != nil { + return err + } + + _, err = config.clientDataproc.Projects.Regions.Clusters.Get( + project, attributes["region"], rs.Primary.ID).Do() + + if err != nil { + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == http.StatusNotFound { + return nil + } else if ok { + return fmt.Errorf("Error validating cluster deleted. Code: %d. 
Message: %s", gerr.Code, gerr.Message) + } + return fmt.Errorf("Error validating cluster deleted. %s", err.Error()) + } + return fmt.Errorf("Dataproc cluster still exists") + } + + return nil + } +} + +func testAccCheckDataprocClusterHasServiceScopes(t *testing.T, cluster *dataproc.Cluster, scopes ...string) func(s *terraform.State) error { + return func(s *terraform.State) error { + + if !reflect.DeepEqual(scopes, cluster.Config.GceClusterConfig.ServiceAccountScopes) { + return fmt.Errorf("Cluster does not contain expected set of service account scopes : %v : instead %v", + scopes, cluster.Config.GceClusterConfig.ServiceAccountScopes) + } + return nil + } +} + +func validateBucketExists(bucket string, config *Config) (bool, error) { + _, err := config.clientStorage.Buckets.Get(bucket).Do() + + if err != nil { + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == http.StatusNotFound { + return false, nil + } else if ok { + return false, fmt.Errorf("Error validating bucket exists: http code error : %d, http message error: %s", gerr.Code, gerr.Message) + } + return false, fmt.Errorf("Error validating bucket exists: %s", err.Error()) + } + return true, nil +} + +func testAccCheckDataprocStagingBucketExists(bucketName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + + config := testAccProvider.Meta().(*Config) + + exists, err := validateBucketExists(bucketName, config) + if err != nil { + return err + } + if !exists { + return fmt.Errorf("Staging Bucket %s does not exist", bucketName) + } + return nil + } + +} + +func testAccCheckDataprocClusterInitActionSucceeded(bucket, object string) resource.TestCheckFunc { + + // The init script will have created an object in the specified bucket. 
+ // Ensure it exists + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + _, err := config.clientStorage.Objects.Get(bucket, object).Do() + if err != nil { + return fmt.Errorf("Unable to verify init action success: Error reading object %s in bucket %s: %v", object, bucket, err) + } + + return nil + } +} + +func validateDataprocCluster_withConfigOverrides(n string, cluster *dataproc.Cluster) resource.TestCheckFunc { + return func(s *terraform.State) error { + + type tfAndGCPTestField struct { + tfAttr string + expectedVal string + actualGCPVal string + } + + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Terraform resource Not found: %s", n) + } + + if cluster.Config.MasterConfig == nil || cluster.Config.WorkerConfig == nil || cluster.Config.SecondaryWorkerConfig == nil { + return fmt.Errorf("Master/Worker/SecondaryConfig values not set in GCP, expecting values") + } + + clusterTests := []tfAndGCPTestField{ + {"cluster_config.0.master_config.0.num_instances", "3", strconv.Itoa(int(cluster.Config.MasterConfig.NumInstances))}, + {"cluster_config.0.master_config.0.disk_config.0.boot_disk_size_gb", "10", strconv.Itoa(int(cluster.Config.MasterConfig.DiskConfig.BootDiskSizeGb))}, + {"cluster_config.0.master_config.0.disk_config.0.num_local_ssds", "0", strconv.Itoa(int(cluster.Config.MasterConfig.DiskConfig.NumLocalSsds))}, + {"cluster_config.0.master_config.0.disk_config.0.boot_disk_type", "pd-ssd", cluster.Config.MasterConfig.DiskConfig.BootDiskType}, + {"cluster_config.0.master_config.0.machine_type", "n1-standard-1", GetResourceNameFromSelfLink(cluster.Config.MasterConfig.MachineTypeUri)}, + {"cluster_config.0.master_config.0.instance_names.#", "3", strconv.Itoa(len(cluster.Config.MasterConfig.InstanceNames))}, + + {"cluster_config.0.worker_config.0.num_instances", "3", strconv.Itoa(int(cluster.Config.WorkerConfig.NumInstances))}, + {"cluster_config.0.worker_config.0.disk_config.0.boot_disk_size_gb", "11", 
strconv.Itoa(int(cluster.Config.WorkerConfig.DiskConfig.BootDiskSizeGb))}, + {"cluster_config.0.worker_config.0.disk_config.0.num_local_ssds", "1", strconv.Itoa(int(cluster.Config.WorkerConfig.DiskConfig.NumLocalSsds))}, + {"cluster_config.0.worker_config.0.disk_config.0.boot_disk_type", "pd-standard", cluster.Config.WorkerConfig.DiskConfig.BootDiskType}, + {"cluster_config.0.worker_config.0.machine_type", "n1-standard-1", GetResourceNameFromSelfLink(cluster.Config.WorkerConfig.MachineTypeUri)}, + {"cluster_config.0.worker_config.0.instance_names.#", "3", strconv.Itoa(len(cluster.Config.WorkerConfig.InstanceNames))}, + + {"cluster_config.0.preemptible_worker_config.0.num_instances", "1", strconv.Itoa(int(cluster.Config.SecondaryWorkerConfig.NumInstances))}, + {"cluster_config.0.preemptible_worker_config.0.disk_config.0.boot_disk_size_gb", "12", strconv.Itoa(int(cluster.Config.SecondaryWorkerConfig.DiskConfig.BootDiskSizeGb))}, + {"cluster_config.0.preemptible_worker_config.0.instance_names.#", "1", strconv.Itoa(len(cluster.Config.SecondaryWorkerConfig.InstanceNames))}, + } + + for _, attrs := range clusterTests { + tfVal := rs.Primary.Attributes[attrs.tfAttr] + if tfVal != attrs.expectedVal { + return fmt.Errorf("%s: Terraform Attribute value '%s' is not as expected '%s' ", attrs.tfAttr, tfVal, attrs.expectedVal) + } + if attrs.actualGCPVal != tfVal { + return fmt.Errorf("%s: Terraform Attribute value '%s' is not aligned with that in GCP '%s' ", attrs.tfAttr, tfVal, attrs.actualGCPVal) + } + } + + return nil + } +} + +func testAccCheckDataprocClusterExists(n string, cluster *dataproc.Cluster) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Terraform resource Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set for Dataproc cluster") + } + + config := testAccProvider.Meta().(*Config) + project, err := getTestProject(rs.Primary, config) + if err 
!= nil { + return err + } + + found, err := config.clientDataproc.Projects.Regions.Clusters.Get( + project, rs.Primary.Attributes["region"], rs.Primary.ID).Do() + if err != nil { + return err + } + + if found.ClusterName != rs.Primary.ID { + return fmt.Errorf("Dataproc cluster %s not found, found %s instead", rs.Primary.ID, cluster.ClusterName) + } + + *cluster = *found + + return nil + } +} + +func testAccCheckDataproc_missingZoneGlobalRegion1(rnd string) string { + return fmt.Sprintf(` +resource "google_dataproc_cluster" "basic" { + name = "dproc-cluster-test-%s" + region = "global" +} +`, rnd) +} + +func testAccCheckDataproc_missingZoneGlobalRegion2(rnd string) string { + return fmt.Sprintf(` +resource "google_dataproc_cluster" "basic" { + name = "dproc-cluster-test-%s" + region = "global" + + cluster_config { + gce_cluster_config { } + } +} +`, rnd) +} + +func testAccDataprocCluster_basic(rnd string) string { + return fmt.Sprintf(` +resource "google_dataproc_cluster" "basic" { + name = "dproc-cluster-test-%s" + region = "us-central1" +} +`, rnd) +} + +func testAccDataprocCluster_withInternalIpOnlyTrue(rnd string) string { + return fmt.Sprintf(` +variable subnetwork_cidr { + default = "10.0.0.0/16" +} + +resource "google_compute_network" "dataproc_network" { + name = "dataproc-internalip-network-%s" + auto_create_subnetworks = false +} + +# +# Create a subnet with Private IP Access enabled to test +# deploying a Dataproc cluster with Internal IP Only enabled. +# +resource "google_compute_subnetwork" "dataproc_subnetwork" { + name = "dataproc-internalip-subnetwork-%s" + ip_cidr_range = "${var.subnetwork_cidr}" + network = "${google_compute_network.dataproc_network.self_link}" + region = "us-central1" + private_ip_google_access = true + } + +# +# The default network within GCP already comes pre configured with +# certain firewall rules open to allow internal communication. 
As we +# are creating a new one here for this test, we need to additionally +# open up similar rules to allow the nodes to talk to each other +# internally as part of their configuration or this will just hang. +# +resource "google_compute_firewall" "dataproc_network_firewall" { + name = "dproc-cluster-test-allow-internal" + description = "Firewall rules for dataproc Terraform acceptance testing" + network = "${google_compute_network.dataproc_network.name}" + + allow { + protocol = "icmp" + } + + allow { + protocol = "tcp" + ports = ["0-65535"] + } + + allow { + protocol = "udp" + ports = ["0-65535"] + } + + source_ranges = ["${var.subnetwork_cidr}"] +} +resource "google_dataproc_cluster" "basic" { + name = "dproc-cluster-test-%s" + region = "us-central1" + depends_on = ["google_compute_firewall.dataproc_network_firewall"] + + cluster_config { + gce_cluster_config { + subnetwork = "${google_compute_subnetwork.dataproc_subnetwork.name}" + internal_ip_only = true + } + } +} +`, rnd, rnd, rnd) +} + +func testAccDataprocCluster_withMetadata(rnd string) string { + return fmt.Sprintf(` +resource "google_dataproc_cluster" "basic" { + name = "dproc-cluster-test-%s" + region = "us-central1" + + cluster_config { + gce_cluster_config { + metadata { + foo = "bar" + baz = "qux" + } + } + } +} +`, rnd) +} + +func testAccDataprocCluster_singleNodeCluster(rnd string) string { + return fmt.Sprintf(` +resource "google_dataproc_cluster" "single_node_cluster" { + name = "dproc-cluster-test-%s" + region = "us-central1" + + cluster_config { + # Keep the costs down with smallest config we can get away with + software_config { + override_properties = { + "dataproc:dataproc.allow.zero.workers" = "true" + } + } + } +} +`, rnd) +} + +func testAccDataprocCluster_withConfigOverrides(rnd string) string { + return fmt.Sprintf(` +resource "google_dataproc_cluster" "with_config_overrides" { + name = "dproc-cluster-test-%s" + region = "us-central1" + + cluster_config { + + master_config { + 
num_instances = 3 + machine_type = "n1-standard-1" + disk_config { + boot_disk_type = "pd-ssd" + boot_disk_size_gb = 10 + } + } + + worker_config { + num_instances = 3 + machine_type = "n1-standard-1" + disk_config { + boot_disk_type = "pd-standard" + boot_disk_size_gb = 11 + num_local_ssds = 1 + } + } + + preemptible_worker_config { + num_instances = 1 + disk_config { + boot_disk_size_gb = 12 + } + } + } +}`, rnd) +} + +func testAccDataprocCluster_withInitAction(rnd, bucket, objName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "init_bucket" { + name = "%s" + force_destroy = "true" +} + +resource "google_storage_bucket_object" "init_script" { + name = "dproc-cluster-test-%s-init-script.sh" + bucket = "${google_storage_bucket.init_bucket.name}" + content = <> /tmp/%s +gsutil cp /tmp/%s ${google_storage_bucket.init_bucket.url} +EOL + +} + +resource "google_dataproc_cluster" "with_init_action" { + name = "dproc-cluster-test-%s" + region = "us-central1" + + cluster_config { + # Keep the costs down with smallest config we can get away with + software_config { + override_properties = { + "dataproc:dataproc.allow.zero.workers" = "true" + } + } + + master_config { + machine_type = "n1-standard-1" + disk_config { + boot_disk_size_gb = 10 + } + } + + initialization_action { + script = "${google_storage_bucket.init_bucket.url}/${google_storage_bucket_object.init_script.name}" + timeout_sec = 500 + } + initialization_action { + script = "${google_storage_bucket.init_bucket.url}/${google_storage_bucket_object.init_script.name}" + } + } +}`, bucket, rnd, objName, objName, rnd) +} + +func testAccDataprocCluster_updatable(rnd string, w, p int) string { + return fmt.Sprintf(` +resource "google_dataproc_cluster" "updatable" { + name = "dproc-cluster-test-%s" + region = "us-central1" + + cluster_config { + + master_config { + num_instances = "1" + machine_type = "n1-standard-1" + disk_config { + boot_disk_size_gb = 10 + } + } + + worker_config { + 
num_instances = "%d" + machine_type = "n1-standard-1" + disk_config { + boot_disk_size_gb = 10 + } + } + + preemptible_worker_config { + num_instances = "%d" + disk_config { + boot_disk_size_gb = 10 + } + } + } + +}`, rnd, w, p) +} + +func testAccDataprocCluster_withStagingBucketOnly(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + force_destroy = "true" +}`, bucketName) +} + +func testAccDataprocCluster_withStagingBucketAndCluster(clusterName, bucketName string) string { + return fmt.Sprintf(` +%s + +resource "google_dataproc_cluster" "with_bucket" { + name = "%s" + region = "us-central1" + + cluster_config { + staging_bucket = "${google_storage_bucket.bucket.name}" + + # Keep the costs down with smallest config we can get away with + software_config { + override_properties = { + "dataproc:dataproc.allow.zero.workers" = "true" + } + } + + master_config { + machine_type = "n1-standard-1" + disk_config { + boot_disk_size_gb = 10 + } + } + } +}`, testAccDataprocCluster_withStagingBucketOnly(bucketName), clusterName) +} + +func testAccDataprocCluster_withLabels(rnd string) string { + return fmt.Sprintf(` +resource "google_dataproc_cluster" "with_labels" { + name = "dproc-cluster-test-%s" + region = "us-central1" + + labels { + key1 = "value1" + } + + # This is because GCP automatically adds its own labels as well. 
+ # In this case we just want to test our newly added label is there + lifecycle { + ignore_changes = ["labels"] + } + +}`, rnd) +} + +func testAccDataprocCluster_withImageVersion(rnd string) string { + return fmt.Sprintf(` +resource "google_dataproc_cluster" "with_image_version" { + name = "dproc-cluster-test-%s" + region = "us-central1" + + cluster_config { + software_config { + image_version = "1.3.7-deb9" + } + } +}`, rnd) +} + +func testAccDataprocCluster_withServiceAcc(sa string, rnd string) string { + return fmt.Sprintf(` +resource "google_service_account" "service_account" { + account_id = "%s" +} + +resource "google_project_iam_member" "service_account" { + role = "roles/dataproc.worker" + member = "serviceAccount:${google_service_account.service_account.email}" +} + +resource "google_dataproc_cluster" "with_service_account" { + name = "dproc-cluster-test-%s" + region = "us-central1" + + cluster_config { + # Keep the costs down with smallest config we can get away with + software_config { + override_properties = { + "dataproc:dataproc.allow.zero.workers" = "true" + } + } + + master_config { + machine_type = "n1-standard-1" + disk_config { + boot_disk_size_gb = 10 + } + } + + gce_cluster_config { + service_account = "${google_service_account.service_account.email}" + service_account_scopes = [ + # User supplied scopes + "https://www.googleapis.com/auth/monitoring", + + # The following scopes necessary for the cluster to function properly are + # always added, even if not explicitly specified: + # useraccounts-ro: https://www.googleapis.com/auth/cloud.useraccounts.readonly + # storage-rw: https://www.googleapis.com/auth/devstorage.read_write + # logging-write: https://www.googleapis.com/auth/logging.write + "useraccounts-ro","storage-rw","logging-write" + ] + } + } + + depends_on = ["google_project_iam_member.service_account"] + +}`, sa, rnd) +} + +func testAccDataprocCluster_withNetworkRefs(rnd, netName string) string { + return fmt.Sprintf(` +resource 
"google_compute_network" "dataproc_network" { + name = "%s" + auto_create_subnetworks = true +} + +# +# The default network within GCP already comes pre configured with +# certain firewall rules open to allow internal communication. As we +# are creating a new one here for this test, we need to additionally +# open up similar rules to allow the nodes to talk to each other +# internally as part of their configuration or this will just hang. +# +resource "google_compute_firewall" "dataproc_network_firewall" { + name = "dproc-cluster-test-%s-allow-internal" + description = "Firewall rules for dataproc Terraform acceptance testing" + network = "${google_compute_network.dataproc_network.name}" + + allow { + protocol = "icmp" + } + + allow { + protocol = "tcp" + ports = ["0-65535"] + } + + allow { + protocol = "udp" + ports = ["0-65535"] + } +} + +resource "google_dataproc_cluster" "with_net_ref_by_name" { + name = "dproc-cluster-test-%s-name" + region = "us-central1" + depends_on = ["google_compute_firewall.dataproc_network_firewall"] + + cluster_config { + # Keep the costs down with smallest config we can get away with + software_config { + override_properties = { + "dataproc:dataproc.allow.zero.workers" = "true" + } + } + + master_config { + machine_type = "n1-standard-1" + disk_config { + boot_disk_size_gb = 10 + } + } + + gce_cluster_config { + network = "${google_compute_network.dataproc_network.name}" + } + } +} + +resource "google_dataproc_cluster" "with_net_ref_by_url" { + name = "dproc-cluster-test-%s-url" + region = "us-central1" + depends_on = ["google_compute_firewall.dataproc_network_firewall"] + + cluster_config { + # Keep the costs down with smallest config we can get away with + software_config { + override_properties = { + "dataproc:dataproc.allow.zero.workers" = "true" + } + } + + master_config { + machine_type = "n1-standard-1" + disk_config { + boot_disk_size_gb = 10 + } + } + + gce_cluster_config { + network = 
"${google_compute_network.dataproc_network.self_link}" + } + } +} + +`, netName, rnd, rnd, rnd) +} diff --git a/provider/terraform/tests/resource_dataproc_job_test.go b/provider/terraform/tests/resource_dataproc_job_test.go new file mode 100644 index 000000000000..4fc6b0aa4b23 --- /dev/null +++ b/provider/terraform/tests/resource_dataproc_job_test.go @@ -0,0 +1,693 @@ +package google + +import ( + "fmt" + "io/ioutil" + "log" + "strings" + "testing" + + "regexp" + + "github.com/hashicorp/errwrap" + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/dataproc/v1" + "google.golang.org/api/googleapi" +) + +type jobTestField struct { + tf_attr string + gcp_attr interface{} +} + +func TestAccDataprocJob_failForMissingJobConfig(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDataprocJob_missingJobConf(), + ExpectError: regexp.MustCompile("You must define and configure exactly one xxx_config block"), + }, + }, + }) +} + +func TestAccDataprocJob_updatable(t *testing.T) { + t.Parallel() + + var job dataproc.Job + rnd := acctest.RandString(10) + jobId := fmt.Sprintf("dproc-update-job-id-%s", rnd) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDataprocJob_updatable(rnd, jobId, "false"), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocJobExists("google_dataproc_job.updatable", &job), + resource.TestCheckResourceAttr("google_dataproc_job.updatable", "force_delete", "false"), + ), + }, + { + Config: testAccDataprocJob_updatable(rnd, jobId, "true"), + Check: resource.ComposeTestCheckFunc( + 
testAccCheckDataprocJobExists("google_dataproc_job.updatable", &job), + resource.TestCheckResourceAttr("google_dataproc_job.updatable", "force_delete", "true"), + ), + }, + }, + }) +} + +func TestAccDataprocJob_PySpark(t *testing.T) { + t.Parallel() + + var job dataproc.Job + rnd := acctest.RandString(10) + jobId := fmt.Sprintf("dproc-custom-job-id-%s", rnd) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDataprocJob_pySpark(rnd), + Check: resource.ComposeTestCheckFunc( + + testAccCheckDataprocJobExists("google_dataproc_job.pyspark", &job), + + // Custom supplied job_id + resource.TestCheckResourceAttr("google_dataproc_job.pyspark", "reference.0.job_id", jobId), + + // Autogenerated / computed values + resource.TestCheckResourceAttrSet("google_dataproc_job.pyspark", "status.0.state"), + resource.TestCheckResourceAttrSet("google_dataproc_job.pyspark", "status.0.state_start_time"), + resource.TestCheckResourceAttr("google_dataproc_job.pyspark", "scheduling.0.max_failures_per_hour", "1"), + resource.TestCheckResourceAttr("google_dataproc_job.pyspark", "labels.one", "1"), + + // Unique job config + testAccCheckDataprocJobAttrMatch( + "google_dataproc_job.pyspark", "pyspark_config", &job), + + // Wait until job completes successfully + testAccCheckDataprocJobCompletesSuccessfully("google_dataproc_job.pyspark", &job), + ), + }, + }, + }) +} + +func TestAccDataprocJob_Spark(t *testing.T) { + t.Parallel() + + var job dataproc.Job + rnd := acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDataprocJob_spark(rnd), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocJobExists("google_dataproc_job.spark", &job), + + // 
Autogenerated / computed values + resource.TestCheckResourceAttrSet("google_dataproc_job.spark", "reference.0.job_id"), + resource.TestCheckResourceAttrSet("google_dataproc_job.spark", "status.0.state"), + resource.TestCheckResourceAttrSet("google_dataproc_job.spark", "status.0.state_start_time"), + + // Unique job config + testAccCheckDataprocJobAttrMatch( + "google_dataproc_job.spark", "spark_config", &job), + + // Wait until job completes successfully + testAccCheckDataprocJobCompletesSuccessfully("google_dataproc_job.spark", &job), + ), + }, + }, + }) +} + +func TestAccDataprocJob_Hadoop(t *testing.T) { + t.Parallel() + + var job dataproc.Job + rnd := acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDataprocJob_hadoop(rnd), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocJobExists("google_dataproc_job.hadoop", &job), + + // Autogenerated / computed values + resource.TestCheckResourceAttrSet("google_dataproc_job.hadoop", "reference.0.job_id"), + resource.TestCheckResourceAttrSet("google_dataproc_job.hadoop", "status.0.state"), + resource.TestCheckResourceAttrSet("google_dataproc_job.hadoop", "status.0.state_start_time"), + + // Unique job config + testAccCheckDataprocJobAttrMatch( + "google_dataproc_job.hadoop", "hadoop_config", &job), + + // Wait until job completes successfully + testAccCheckDataprocJobCompletesSuccessfully("google_dataproc_job.hadoop", &job), + ), + }, + }, + }) +} + +func TestAccDataprocJob_Hive(t *testing.T) { + t.Parallel() + + var job dataproc.Job + rnd := acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDataprocJob_hive(rnd), + Check: 
resource.ComposeTestCheckFunc( + testAccCheckDataprocJobExists("google_dataproc_job.hive", &job), + + // Autogenerated / computed values + resource.TestCheckResourceAttrSet("google_dataproc_job.hive", "reference.0.job_id"), + resource.TestCheckResourceAttrSet("google_dataproc_job.hive", "status.0.state"), + resource.TestCheckResourceAttrSet("google_dataproc_job.hive", "status.0.state_start_time"), + + // Unique job config + testAccCheckDataprocJobAttrMatch( + "google_dataproc_job.hive", "hive_config", &job), + + // Wait until job completes successfully + testAccCheckDataprocJobCompletesSuccessfully("google_dataproc_job.hive", &job), + ), + }, + }, + }) +} + +func TestAccDataprocJob_Pig(t *testing.T) { + t.Parallel() + + var job dataproc.Job + rnd := acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDataprocJob_pig(rnd), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocJobExists("google_dataproc_job.pig", &job), + + // Autogenerated / computed values + resource.TestCheckResourceAttrSet("google_dataproc_job.pig", "reference.0.job_id"), + resource.TestCheckResourceAttrSet("google_dataproc_job.pig", "status.0.state"), + resource.TestCheckResourceAttrSet("google_dataproc_job.pig", "status.0.state_start_time"), + + // Unique job config + testAccCheckDataprocJobAttrMatch( + "google_dataproc_job.pig", "pig_config", &job), + + // Wait until job completes successfully + testAccCheckDataprocJobCompletesSuccessfully("google_dataproc_job.pig", &job), + ), + }, + }, + }) +} + +func TestAccDataprocJob_SparkSql(t *testing.T) { + t.Parallel() + + var job dataproc.Job + rnd := acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataprocJobDestroy, + Steps: []resource.TestStep{ + { 
+ Config: testAccDataprocJob_sparksql(rnd), + Check: resource.ComposeTestCheckFunc( + testAccCheckDataprocJobExists("google_dataproc_job.sparksql", &job), + + // Autogenerated / computed values + resource.TestCheckResourceAttrSet("google_dataproc_job.sparksql", "reference.0.job_id"), + resource.TestCheckResourceAttrSet("google_dataproc_job.sparksql", "status.0.state"), + resource.TestCheckResourceAttrSet("google_dataproc_job.sparksql", "status.0.state_start_time"), + + // Unique job config + testAccCheckDataprocJobAttrMatch( + "google_dataproc_job.sparksql", "sparksql_config", &job), + + // Wait until job completes successfully + testAccCheckDataprocJobCompletesSuccessfully("google_dataproc_job.sparksql", &job), + ), + }, + }, + }) +} + +func testAccCheckDataprocJobDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_dataproc_job" { + continue + } + + if rs.Primary.ID == "" { + return fmt.Errorf("Unable to verify delete of dataproc job ID is empty") + } + attributes := rs.Primary.Attributes + + project, err := getTestProject(rs.Primary, config) + if err != nil { + return err + } + + _, err = config.clientDataproc.Projects.Regions.Jobs.Get( + project, attributes["region"], rs.Primary.ID).Do() + if err != nil { + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { + return nil + } else if ok { + return fmt.Errorf("Error making GCP platform call: http code error : %d, http message error: %s", gerr.Code, gerr.Message) + } + return fmt.Errorf("Error making GCP platform call: %s", err.Error()) + } + return fmt.Errorf("Dataproc job still exists") + } + + return nil +} + +func testAccCheckDataprocJobCompletesSuccessfully(n string, job *dataproc.Job) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + attributes := s.RootModule().Resources[n].Primary.Attributes + region := attributes["region"] + 
project, err := getTestProject(s.RootModule().Resources[n].Primary, config) + if err != nil { + return err + } + + jobCompleteTimeoutMins := 5 + waitErr := dataprocJobOperationWait(config, region, project, job.Reference.JobId, + "Awaiting Dataproc job completion", jobCompleteTimeoutMins, 1) + if waitErr != nil { + return waitErr + } + + completeJob, err := config.clientDataproc.Projects.Regions.Jobs.Get( + project, region, job.Reference.JobId).Do() + if err != nil { + return err + } + if completeJob.Status.State == "ERROR" { + if !strings.HasPrefix(completeJob.DriverOutputResourceUri, "gs://") { + return fmt.Errorf("Job completed in ERROR state but no valid log URI found") + } + u := strings.SplitN(strings.TrimPrefix(completeJob.DriverOutputResourceUri, "gs://"), "/", 2) + if len(u) != 2 { + return fmt.Errorf("Job completed in ERROR state but no valid log URI found") + } + l, err := config.clientStorage.Objects.List(u[0]).Prefix(u[1]).Do() + if err != nil { + return errwrap.Wrapf("Job completed in ERROR state, found error when trying to list logs: {{err}}", err) + } + for _, item := range l.Items { + resp, err := config.clientStorage.Objects.Get(item.Bucket, item.Name).Download() + if err != nil { + return errwrap.Wrapf("Job completed in ERROR state, found error when trying to read logs: {{err}}", err) + } + defer resp.Body.Close() + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return errwrap.Wrapf("Job completed in ERROR state, found error when trying to read logs: {{err}}", err) + } + log.Printf("[ERROR] Job failed, driver logs:\n%s", body) + } + return fmt.Errorf("Job completed in ERROR state, check logs for details") + } else if completeJob.Status.State != "DONE" { + return fmt.Errorf("Job did not complete successfully, instead status: %s", completeJob.Status.State) + } + + return nil + } +} + +func testAccCheckDataprocJobExists(n string, job *dataproc.Job) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := 
s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Terraform resource Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set for Dataproc job") + } + + config := testAccProvider.Meta().(*Config) + jobId := s.RootModule().Resources[n].Primary.ID + project, err := getTestProject(s.RootModule().Resources[n].Primary, config) + if err != nil { + return err + } + + found, err := config.clientDataproc.Projects.Regions.Jobs.Get( + project, rs.Primary.Attributes["region"], jobId).Do() + if err != nil { + return err + } + + *job = *found + + return nil + } +} + +func testAccCheckDataprocJobAttrMatch(n, jobType string, job *dataproc.Job) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := getResourceAttributes(n, s) + if err != nil { + return err + } + + jobTests := []jobTestField{} + if jobType == "pyspark_config" { + jobTests = append(jobTests, jobTestField{"pyspark_config.0.main_python_file_uri", job.PysparkJob.MainPythonFileUri}) + jobTests = append(jobTests, jobTestField{"pyspark_config.0.args", job.PysparkJob.Args}) + jobTests = append(jobTests, jobTestField{"pyspark_config.0.python_file_uris", job.PysparkJob.PythonFileUris}) + jobTests = append(jobTests, jobTestField{"pyspark_config.0.jar_file_uris", job.PysparkJob.JarFileUris}) + jobTests = append(jobTests, jobTestField{"pyspark_config.0.file_uris", job.PysparkJob.FileUris}) + jobTests = append(jobTests, jobTestField{"pyspark_config.0.archive_uris", job.PysparkJob.ArchiveUris}) + jobTests = append(jobTests, jobTestField{"pyspark_config.0.properties", job.PysparkJob.Properties}) + jobTests = append(jobTests, jobTestField{"pyspark_config.0.logging_config.0.driver_log_levels", job.PysparkJob.LoggingConfig.DriverLogLevels}) + } + if jobType == "spark_config" { + jobTests = append(jobTests, jobTestField{"spark_config.0.main_class", job.SparkJob.MainClass}) + jobTests = append(jobTests, jobTestField{"spark_config.0.main_jar_file_uri", 
job.SparkJob.MainJarFileUri}) + jobTests = append(jobTests, jobTestField{"spark_config.0.args", job.SparkJob.Args}) + jobTests = append(jobTests, jobTestField{"spark_config.0.jar_file_uris", job.SparkJob.JarFileUris}) + jobTests = append(jobTests, jobTestField{"spark_config.0.file_uris", job.SparkJob.FileUris}) + jobTests = append(jobTests, jobTestField{"spark_config.0.archive_uris", job.SparkJob.ArchiveUris}) + jobTests = append(jobTests, jobTestField{"spark_config.0.properties", job.SparkJob.Properties}) + jobTests = append(jobTests, jobTestField{"spark_config.0.logging_config.0.driver_log_levels", job.SparkJob.LoggingConfig.DriverLogLevels}) + } + if jobType == "hadoop_config" { + jobTests = append(jobTests, jobTestField{"hadoop_config.0.main_class", job.HadoopJob.MainClass}) + jobTests = append(jobTests, jobTestField{"hadoop_config.0.main_jar_file_uri", job.HadoopJob.MainJarFileUri}) + jobTests = append(jobTests, jobTestField{"hadoop_config.0.args", job.HadoopJob.Args}) + jobTests = append(jobTests, jobTestField{"hadoop_config.0.jar_file_uris", job.HadoopJob.JarFileUris}) + jobTests = append(jobTests, jobTestField{"hadoop_config.0.file_uris", job.HadoopJob.FileUris}) + jobTests = append(jobTests, jobTestField{"hadoop_config.0.archive_uris", job.HadoopJob.ArchiveUris}) + jobTests = append(jobTests, jobTestField{"hadoop_config.0.properties", job.HadoopJob.Properties}) + jobTests = append(jobTests, jobTestField{"hadoop_config.0.logging_config.0.driver_log_levels", job.HadoopJob.LoggingConfig.DriverLogLevels}) + } + if jobType == "hive_config" { + queries := []string{} + if job.HiveJob.QueryList != nil { + queries = job.HiveJob.QueryList.Queries + } + jobTests = append(jobTests, jobTestField{"hive_config.0.query_list", queries}) + jobTests = append(jobTests, jobTestField{"hive_config.0.query_file_uri", job.HiveJob.QueryFileUri}) + jobTests = append(jobTests, jobTestField{"hive_config.0.continue_on_failure", job.HiveJob.ContinueOnFailure}) + jobTests = 
append(jobTests, jobTestField{"hive_config.0.script_variables", job.HiveJob.ScriptVariables}) + jobTests = append(jobTests, jobTestField{"hive_config.0.properties", job.HiveJob.Properties}) + jobTests = append(jobTests, jobTestField{"hive_config.0.jar_file_uris", job.HiveJob.JarFileUris}) + } + if jobType == "pig_config" { + queries := []string{} + if job.PigJob.QueryList != nil { + queries = job.PigJob.QueryList.Queries + } + jobTests = append(jobTests, jobTestField{"pig_config.0.query_list", queries}) + jobTests = append(jobTests, jobTestField{"pig_config.0.query_file_uri", job.PigJob.QueryFileUri}) + jobTests = append(jobTests, jobTestField{"pig_config.0.continue_on_failure", job.PigJob.ContinueOnFailure}) + jobTests = append(jobTests, jobTestField{"pig_config.0.script_variables", job.PigJob.ScriptVariables}) + jobTests = append(jobTests, jobTestField{"pig_config.0.properties", job.PigJob.Properties}) + jobTests = append(jobTests, jobTestField{"pig_config.0.jar_file_uris", job.PigJob.JarFileUris}) + } + if jobType == "sparksql_config" { + queries := []string{} + if job.SparkSqlJob.QueryList != nil { + queries = job.SparkSqlJob.QueryList.Queries + } + jobTests = append(jobTests, jobTestField{"sparksql_config.0.query_list", queries}) + jobTests = append(jobTests, jobTestField{"sparksql_config.0.query_file_uri", job.SparkSqlJob.QueryFileUri}) + jobTests = append(jobTests, jobTestField{"sparksql_config.0.script_variables", job.SparkSqlJob.ScriptVariables}) + jobTests = append(jobTests, jobTestField{"sparksql_config.0.properties", job.SparkSqlJob.Properties}) + jobTests = append(jobTests, jobTestField{"sparksql_config.0.jar_file_uris", job.SparkSqlJob.JarFileUris}) + } + + for _, attrs := range jobTests { + if c := checkMatch(attributes, attrs.tf_attr, attrs.gcp_attr); c != "" { + return fmt.Errorf(c) + } + } + + return nil + } +} + +func testAccDataprocJob_missingJobConf() string { + return ` +resource "google_dataproc_job" "missing_config" { + placement { + 
cluster_name = "na" + } + + force_delete = true +}` +} + +var singleNodeClusterConfig = ` +resource "google_dataproc_cluster" "basic" { + name = "dproc-job-test-%s" + region = "us-central1" + + cluster_config { + # Keep the costs down with smallest config we can get away with + software_config { + override_properties = { + "dataproc:dataproc.allow.zero.workers" = "true" + } + } + + master_config { + num_instances = 1 + machine_type = "n1-standard-1" + disk_config { + boot_disk_size_gb = 10 + } + } + } +} +` + +func testAccDataprocJob_updatable(rnd, jobId, del string) string { + return fmt.Sprintf( + singleNodeClusterConfig+` + +resource "google_dataproc_job" "updatable" { + + placement { + cluster_name = "${google_dataproc_cluster.basic.name}" + } + reference { + job_id = "%s" + } + + region = "${google_dataproc_cluster.basic.region}" + force_delete = %s + + pyspark_config { + main_python_file_uri = "gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py" + } +} +`, rnd, jobId, del) +} + +func testAccDataprocJob_pySpark(rnd string) string { + return fmt.Sprintf( + singleNodeClusterConfig+` + +resource "google_dataproc_job" "pyspark" { + + placement { + cluster_name = "${google_dataproc_cluster.basic.name}" + } + reference { + job_id = "dproc-custom-job-id-%s" + } + + region = "${google_dataproc_cluster.basic.region}" + force_delete = true + + pyspark_config { + main_python_file_uri = "gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py" + properties = { + "spark.logConf" = "true" + } + logging_config { + driver_log_levels { + "root" = "INFO" + } + } + } + + scheduling { + max_failures_per_hour = 1 + } + + labels { + one = "1" + } +} +`, rnd, rnd) +} + +func testAccDataprocJob_spark(rnd string) string { + return fmt.Sprintf( + singleNodeClusterConfig+` + + resource "google_dataproc_job" "spark" { + + region = "${google_dataproc_cluster.basic.region}" + force_delete = true + placement 
{ + cluster_name = "${google_dataproc_cluster.basic.name}" + } + + spark_config { + main_class = "org.apache.spark.examples.SparkPi" + jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"] + args = ["1000"] + properties = { + "spark.logConf" = "true" + } + } + } + `, rnd) + +} + +func testAccDataprocJob_hadoop(rnd string) string { + return fmt.Sprintf( + singleNodeClusterConfig+` + + resource "google_dataproc_job" "hadoop" { + + region = "${google_dataproc_cluster.basic.region}" + force_delete = true + placement { + cluster_name = "${google_dataproc_cluster.basic.name}" + } + + hadoop_config { + main_jar_file_uri = "file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar" + args = [ + "wordcount", + "file:///usr/lib/spark/NOTICE", + "gs://${google_dataproc_cluster.basic.cluster_config.0.bucket}/hadoopjob_output" + ] + } + + } + `, rnd) + +} + +func testAccDataprocJob_hive(rnd string) string { + return fmt.Sprintf( + singleNodeClusterConfig+` + + resource "google_dataproc_job" "hive" { + + region = "${google_dataproc_cluster.basic.region}" + force_delete = true + placement { + cluster_name = "${google_dataproc_cluster.basic.name}" + } + + hive_config { + query_list = [ + "DROP TABLE IF EXISTS dprocjob_test", + "CREATE EXTERNAL TABLE dprocjob_test(bar int) LOCATION 'gs://${google_dataproc_cluster.basic.cluster_config.0.bucket}/hive_dprocjob_test/'", + "SELECT * FROM dprocjob_test WHERE bar > 2", + ] + } + + } + `, rnd) + +} + +func testAccDataprocJob_pig(rnd string) string { + return fmt.Sprintf( + singleNodeClusterConfig+` + + resource "google_dataproc_job" "pig" { + + region = "${google_dataproc_cluster.basic.region}" + force_delete = true + placement { + cluster_name = "${google_dataproc_cluster.basic.name}" + } + + pig_config { + query_list = [ + "LNS = LOAD 'file:///usr/lib/pig/LICENSE.txt ' AS (line)", + "WORDS = FOREACH LNS GENERATE FLATTEN(TOKENIZE(line)) AS word", + "GROUPS = GROUP WORDS BY word", + "WORD_COUNTS = FOREACH GROUPS 
GENERATE group, COUNT(WORDS)", + "DUMP WORD_COUNTS" + ] + } + } + `, rnd) + +} + +func testAccDataprocJob_sparksql(rnd string) string { + return fmt.Sprintf( + singleNodeClusterConfig+` + + resource "google_dataproc_job" "sparksql" { + + region = "${google_dataproc_cluster.basic.region}" + force_delete = true + placement { + cluster_name = "${google_dataproc_cluster.basic.name}" + } + + sparksql_config { + query_list = [ + "DROP TABLE IF EXISTS dprocjob_test", + "CREATE TABLE dprocjob_test(bar int)", + "SELECT * FROM dprocjob_test WHERE bar > 2", + ] + } + } + `, rnd) + +} diff --git a/provider/terraform/tests/resource_dns_managed_zone_test.go b/provider/terraform/tests/resource_dns_managed_zone_test.go new file mode 100644 index 000000000000..372f2221b350 --- /dev/null +++ b/provider/terraform/tests/resource_dns_managed_zone_test.go @@ -0,0 +1,202 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccDnsManagedZone_basic(t *testing.T) { + t.Parallel() + + zoneSuffix := acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDnsManagedZoneDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDnsManagedZone_basic(zoneSuffix, "description1"), + }, + resource.TestStep{ + ResourceName: "google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDnsManagedZone_update(t *testing.T) { + t.Parallel() + + zoneSuffix := acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDnsManagedZoneDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDnsManagedZone_basic(zoneSuffix, "description1"), + }, + 
resource.TestStep{ + ResourceName: "google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + Config: testAccDnsManagedZone_basic(zoneSuffix, "description2"), + }, + resource.TestStep{ + ResourceName: "google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckDnsManagedZoneDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_dns_zone" { + continue + } + + _, err := config.clientDns.ManagedZones.Get( + config.Project, rs.Primary.ID).Do() + if err == nil { + return fmt.Errorf("DNS ManagedZone still exists") + } + } + + return nil +} + +func testAccDnsManagedZone_basic(suffix, description string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foobar" { + name = "mzone-test-%s" + dns_name = "tf-acctest-%s.hashicorptest.com." + description = "%s" + labels = { + foo = "bar" + } +}`, suffix, suffix, description) +} + +func TestDnsManagedZoneImport_parseImportId(t *testing.T) { + zoneRegexes := []string{ + "projects/(?P[^/]+)/managedZones/(?P[^/]+)", + "(?P[^/]+)/managedZones/(?P[^/]+)", + "(?P[^/]+)", + } + + cases := map[string]struct { + ImportId string + IdRegexes []string + Config *Config + ExpectedSchemaValues map[string]interface{} + ExpectError bool + }{ + "full self_link": { + IdRegexes: zoneRegexes, + ImportId: "https://www.googleapis.com/dns/v1/projects/my-project/managedZones/my-zone", + ExpectedSchemaValues: map[string]interface{}{ + "project": "my-project", + "name": "my-zone", + }, + }, + "relative self_link": { + IdRegexes: zoneRegexes, + ImportId: "projects/my-project/managedZones/my-zone", + ExpectedSchemaValues: map[string]interface{}{ + "project": "my-project", + "name": "my-zone", + }, + }, + "short id": { + IdRegexes: zoneRegexes, + ImportId: "my-project/managedZones/my-zone", + ExpectedSchemaValues: 
map[string]interface{}{ + "project": "my-project", + "name": "my-zone", + }, + }, + "short id with default project and region": { + IdRegexes: zoneRegexes, + ImportId: "my-zone", + Config: &Config{ + Project: "default-project", + }, + ExpectedSchemaValues: map[string]interface{}{ + "project": "default-project", + "name": "my-zone", + }, + }, + } + + for tn, tc := range cases { + d := &ResourceDataMock{ + FieldsInSchema: make(map[string]interface{}), + id: tc.ImportId, + } + config := tc.Config + if config == nil { + config = &Config{} + } + // + if err := parseImportId(tc.IdRegexes, d, config); err == nil { + for k, expectedValue := range tc.ExpectedSchemaValues { + if v, ok := d.GetOk(k); ok { + if v != expectedValue { + t.Errorf("%s failed; Expected value %q for field %q, got %q", tn, expectedValue, k, v) + } + } else { + t.Errorf("%s failed; Expected a value for field %q", tn, k) + } + } + } else if !tc.ExpectError { + t.Errorf("%s failed; unexpected error: %s", tn, err) + } + } +} + +func TestAccDnsManagedZone_importWithProject(t *testing.T) { + t.Parallel() + + zoneSuffix := acctest.RandString(10) + project := getTestProjectFromEnv() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDnsManagedZoneDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDnsManagedZone_basicWithProject(zoneSuffix, "description1", project), + }, + resource.TestStep{ + ResourceName: "google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDnsManagedZone_basicWithProject(suffix, description, project string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foobar" { + name = "mzone-test-%s" + dns_name = "tf-acctest-%s.hashicorptest.com." 
+ description = "%s" + project = "%s" +}`, suffix, suffix, description, project) +} diff --git a/provider/terraform/tests/resource_dns_record_set_test.go b/provider/terraform/tests/resource_dns_record_set_test.go new file mode 100644 index 000000000000..763a46fdac8c --- /dev/null +++ b/provider/terraform/tests/resource_dns_record_set_test.go @@ -0,0 +1,366 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestIpv6AddressDiffSuppress(t *testing.T) { + cases := map[string]struct { + Old, New string + ShouldSuppress bool + }{ + "compact form should suppress diff": { + Old: "2a03:b0c0:1:e0::29b:8001", + New: "2a03:b0c0:0001:00e0:0000:0000:029b:8001", + ShouldSuppress: true, + }, + "different address should not suppress diff": { + Old: "2a03:b0c0:1:e00::29b:8001", + New: "2a03:b0c0:0001:00e0:0000:0000:029b:8001", + ShouldSuppress: false, + }, + } + + for tn, tc := range cases { + shouldSuppress := ipv6AddressDiffSuppress("", tc.Old, tc.New, nil) + if shouldSuppress != tc.ShouldSuppress { + t.Errorf("%s: expected %t", tn, tc.ShouldSuppress) + } + } +} + +func TestAccDnsRecordSet_basic(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDnsRecordSetDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.10", 300), + Check: resource.ComposeTestCheckFunc( + testAccCheckDnsRecordSetExists( + "google_dns_record_set.foobar", zoneName), + ), + }, + resource.TestStep{ + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/test-record.%s.hashicorptest.com./A", zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func 
TestAccDnsRecordSet_modify(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDnsRecordSetDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.10", 300), + Check: resource.ComposeTestCheckFunc( + testAccCheckDnsRecordSetExists( + "google_dns_record_set.foobar", zoneName), + ), + }, + resource.TestStep{ + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.11", 300), + Check: resource.ComposeTestCheckFunc( + testAccCheckDnsRecordSetExists( + "google_dns_record_set.foobar", zoneName), + ), + }, + resource.TestStep{ + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.11", 600), + Check: resource.ComposeTestCheckFunc( + testAccCheckDnsRecordSetExists( + "google_dns_record_set.foobar", zoneName), + ), + }, + }, + }) +} + +func TestAccDnsRecordSet_changeType(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDnsRecordSetDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.10", 300), + Check: resource.ComposeTestCheckFunc( + testAccCheckDnsRecordSetExists( + "google_dns_record_set.foobar", zoneName), + ), + }, + resource.TestStep{ + Config: testAccDnsRecordSet_bigChange(zoneName, 600), + Check: resource.ComposeTestCheckFunc( + testAccCheckDnsRecordSetExists( + "google_dns_record_set.foobar", zoneName), + ), + }, + }, + }) +} + +func TestAccDnsRecordSet_ns(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-ns-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + 
CheckDestroy: testAccCheckDnsRecordSetDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDnsRecordSet_ns(zoneName, 300), + Check: resource.ComposeTestCheckFunc( + testAccCheckDnsRecordSetExists( + "google_dns_record_set.foobar", zoneName), + ), + }, + resource.TestStep{ + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s.hashicorptest.com./NS", zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDnsRecordSet_nestedNS(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-ns-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDnsRecordSetDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDnsRecordSet_nestedNS(zoneName, 300), + Check: resource.ComposeTestCheckFunc( + testAccCheckDnsRecordSetExists( + "google_dns_record_set.foobar", zoneName), + ), + }, + }, + }) +} + +func TestAccDnsRecordSet_quotedTXT(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-txt-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDnsRecordSetDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccDnsRecordSet_quotedTXT(zoneName, 300), + Check: resource.ComposeTestCheckFunc( + testAccCheckDnsRecordSetExists( + "google_dns_record_set.foobar", zoneName), + ), + }, + }, + }) +} + +func TestAccDnsRecordSet_uppercaseMX(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-txt-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDnsRecordSetDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: 
testAccDnsRecordSet_uppercaseMX(zoneName, 300), + Check: resource.ComposeTestCheckFunc( + testAccCheckDnsRecordSetExists( + "google_dns_record_set.foobar", zoneName), + ), + }, + }, + }) +} + +func testAccCheckDnsRecordSetDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + // Deletion of the managed_zone implies everything is gone + if rs.Type == "google_dns_managed_zone" { + _, err := config.clientDns.ManagedZones.Get( + config.Project, rs.Primary.ID).Do() + if err == nil { + return fmt.Errorf("DNS ManagedZone still exists") + } + } + } + + return nil +} + +func testAccCheckDnsRecordSetExists(resourceType, resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceType] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + dnsName := rs.Primary.Attributes["name"] + dnsType := rs.Primary.Attributes["type"] + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + + resp, err := config.clientDns.ResourceRecordSets.List( + config.Project, resourceName).Name(dnsName).Type(dnsType).Do() + if err != nil { + return fmt.Errorf("Error confirming DNS RecordSet existence: %#v", err) + } + switch len(resp.Rrsets) { + case 0: + // The resource doesn't exist anymore + return fmt.Errorf("DNS RecordSet not found") + case 1: + return nil + default: + return fmt.Errorf("Only expected 1 record set, got %d", len(resp.Rrsets)) + } + } +} + +func testAccDnsRecordSet_basic(zoneName string, addr2 string, ttl int) string { + return fmt.Sprintf(` + resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" + } + resource "google_dns_record_set" "foobar" { + managed_zone = "${google_dns_managed_zone.parent-zone.name}" + name = "test-record.%s.hashicorptest.com." 
+ type = "A" + rrdatas = ["127.0.0.1", "%s"] + ttl = %d + } + `, zoneName, zoneName, zoneName, addr2, ttl) +} + +func testAccDnsRecordSet_ns(name string, ttl int) string { + return fmt.Sprintf(` + resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" + } + resource "google_dns_record_set" "foobar" { + managed_zone = "${google_dns_managed_zone.parent-zone.name}" + name = "%s.hashicorptest.com." + type = "NS" + rrdatas = ["ns.hashicorp.services.", "ns2.hashicorp.services."] + ttl = %d + } + `, name, name, name, ttl) +} + +func testAccDnsRecordSet_nestedNS(name string, ttl int) string { + return fmt.Sprintf(` + resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" + } + resource "google_dns_record_set" "foobar" { + managed_zone = "${google_dns_managed_zone.parent-zone.name}" + name = "nested.%s.hashicorptest.com." + type = "NS" + rrdatas = ["ns.hashicorp.services.", "ns2.hashicorp.services."] + ttl = %d + } + `, name, name, name, ttl) +} + +func testAccDnsRecordSet_bigChange(zoneName string, ttl int) string { + return fmt.Sprintf(` + resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" + } + resource "google_dns_record_set" "foobar" { + managed_zone = "${google_dns_managed_zone.parent-zone.name}" + name = "test-record.%s.hashicorptest.com." + type = "CNAME" + rrdatas = ["www.terraform.io."] + ttl = %d + } + `, zoneName, zoneName, zoneName, ttl) +} + +func testAccDnsRecordSet_quotedTXT(name string, ttl int) string { + return fmt.Sprintf(` + resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." 
+ description = "Test Description" + } + resource "google_dns_record_set" "foobar" { + managed_zone = "${google_dns_managed_zone.parent-zone.name}" + name = "test-record.%s.hashicorptest.com." + type = "TXT" + rrdatas = ["test", "\"quoted test\""] + ttl = %d + } + `, name, name, name, ttl) +} + +func testAccDnsRecordSet_uppercaseMX(name string, ttl int) string { + return fmt.Sprintf(` + resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" + } + resource "google_dns_record_set" "foobar" { + managed_zone = "${google_dns_managed_zone.parent-zone.name}" + name = "test-record.%s.hashicorptest.com." + type = "MX" + rrdatas = [ + "1 ASPMX.L.GOOGLE.COM.", + "5 ALT1.ASPMX.L.GOOGLE.COM.", + "5 ALT2.ASPMX.L.GOOGLE.COM.", + "10 ASPMX2.GOOGLEMAIL.COM.", + "10 ASPMX3.GOOGLEMAIL.COM.", + ] + ttl = %d + } + `, name, name, name, ttl) +} diff --git a/provider/terraform/tests/resource_endpoints_service_test.go b/provider/terraform/tests/resource_endpoints_service_test.go new file mode 100644 index 000000000000..4df87cb9dea5 --- /dev/null +++ b/provider/terraform/tests/resource_endpoints_service_test.go @@ -0,0 +1,177 @@ +package google + +import ( + "reflect" + "testing" + + "fmt" + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/servicemanagement/v1" +) + +func TestAccEndpointsService_basic(t *testing.T) { + t.Parallel() + random_name := "t-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccEndpointsService_basic(random_name), + Check: testAccCheckEndpointExistsByName(random_name), + }, + }, + }) +} + +func TestAccEndpointsService_grpc(t *testing.T) { + t.Parallel() + random_name := "t-" + acctest.RandString(10) + + 
resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccEndpointsService_grpc(random_name), + Check: testAccCheckEndpointExistsByName(random_name), + }, + }, + }) +} + +func TestEndpointsService_grpcMigrateState(t *testing.T) { + cases := map[string]struct { + StateVersion int + Attributes map[string]string + ExpectedAttributes map[string]string + Meta interface{} + }{ + "update from protoc_output to protoc_output_base64": { + StateVersion: 0, + Attributes: map[string]string{ + "protoc_output": "123456789", + "name": "testcase", + }, + ExpectedAttributes: map[string]string{ + "protoc_output_base64": "MTIzNDU2Nzg5", + "protoc_output": "", + "name": "testcase", + }, + Meta: &Config{Project: "gcp-project", Region: "us-central1"}, + }, + "update from non-protoc_output": { + StateVersion: 0, + Attributes: map[string]string{ + "openapi_config": "foo bar baz", + "name": "testcase-2", + }, + ExpectedAttributes: map[string]string{ + "openapi_config": "foo bar baz", + "name": "testcase-2", + }, + Meta: &Config{Project: "gcp-project", Region: "us-central1"}, + }, + } + + for tn, tc := range cases { + is := &terraform.InstanceState{ + ID: tc.Attributes["name"], + Attributes: tc.Attributes, + } + + is, err := migrateEndpointsService(tc.StateVersion, is, tc.Meta) + + if err != nil { + t.Fatalf("bad: %s, err: %#v", tn, err) + } + + if !reflect.DeepEqual(is.Attributes, tc.ExpectedAttributes) { + t.Fatalf("Attributes should be `%s` but are `%s`", tc.ExpectedAttributes, is.Attributes) + } + } +} + +func testAccEndpointsService_basic(random_name string) string { + return fmt.Sprintf(`resource "google_endpoints_service" "endpoints_service" { + service_name = "%s.endpoints.%s.cloud.goog" + project = "%s" + openapi_config = < 0 { + return fmt.Errorf("Folder '%s' policy hasn't been deleted.", folder) + } + } + return nil +} + +func testAccCheckGoogleFolderIamPolicy(n 
string, policy *resourceManagerV2Beta1.Policy) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + + p, err := config.clientResourceManagerV2Beta1.Folders.GetIamPolicy(rs.Primary.ID, &resourceManagerV2Beta1.GetIamPolicyRequest{}).Do() + if err != nil { + return err + } + + if !reflect.DeepEqual(p.Bindings, policy.Bindings) { + return fmt.Errorf("Incorrect iam policy bindings. Expected '%s', got '%s'", policy.Bindings, p.Bindings) + } + + if _, ok = rs.Primary.Attributes["etag"]; !ok { + return fmt.Errorf("Etag should be set.") + } + + if rs.Primary.Attributes["etag"] != p.Etag { + return fmt.Errorf("Incorrect etag value. Expected '%s', got '%s'", p.Etag, rs.Primary.Attributes["etag"]) + } + + return nil + } +} + +// Confirm that a folder has an IAM policy with at least 1 binding +func testAccFolderExistingPolicy(org, fname string) resource.TestCheckFunc { + return func(s *terraform.State) error { + c := testAccProvider.Meta().(*Config) + var err error + originalPolicy, err = getFolderIamPolicyByParentAndDisplayName("organizations/"+org, fname, c) + if err != nil { + return fmt.Errorf("Failed to retrieve IAM Policy for folder %q: %s", fname, err) + } + if len(originalPolicy.Bindings) == 0 { + return fmt.Errorf("Refuse to run test against folder with zero IAM Bindings. 
This is likely an error in the test code that is not properly identifying the IAM policy of a folder.") + } + return nil + } +} + +func testAccFolderIamPolicy_basic(folder, parent string, policy *resourceManagerV2Beta1.Policy) string { + var bindingBuffer bytes.Buffer + + for _, binding := range policy.Bindings { + bindingBuffer.WriteString("binding {\n") + bindingBuffer.WriteString(fmt.Sprintf("role = \"%s\"\n", binding.Role)) + bindingBuffer.WriteString(fmt.Sprintf("members = [\n")) + for _, member := range binding.Members { + bindingBuffer.WriteString(fmt.Sprintf("\"%s\",\n", member)) + } + bindingBuffer.WriteString("]}\n") + } + return fmt.Sprintf(` +resource "google_folder" "permissiontest" { + display_name = "%s" + parent = "%s" +} + +data "google_iam_policy" "test" { + %s +} + +resource "google_folder_iam_policy" "test" { + folder = "${google_folder.permissiontest.name}" + policy_data = "${data.google_iam_policy.test.policy_data}" +} +`, folder, parent, bindingBuffer.String()) +} diff --git a/provider/terraform/tests/resource_google_folder_organization_policy_test.go b/provider/terraform/tests/resource_google_folder_organization_policy_test.go new file mode 100644 index 000000000000..a8b8d0521648 --- /dev/null +++ b/provider/terraform/tests/resource_google_folder_organization_policy_test.go @@ -0,0 +1,376 @@ +package google + +import ( + "fmt" + "reflect" + "sort" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/cloudresourcemanager/v1" +) + +func TestAccFolderOrganizationPolicy_boolean(t *testing.T) { + t.Parallel() + + folder := acctest.RandomWithPrefix("tf-test") + + org := getTestOrgFromEnv(t) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleFolderOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + // Test creation 
of an enforced boolean policy + Config: testAccFolderOrganizationPolicy_boolean(org, folder, true), + Check: testAccCheckGoogleFolderOrganizationBooleanPolicy("bool", true), + }, + { + // Test update from enforced to not + Config: testAccFolderOrganizationPolicy_boolean(org, folder, false), + Check: testAccCheckGoogleFolderOrganizationBooleanPolicy("bool", false), + }, + { + Config: " ", + Destroy: true, + }, + { + // Test creation of a not enforced boolean policy + Config: testAccFolderOrganizationPolicy_boolean(org, folder, false), + Check: testAccCheckGoogleFolderOrganizationBooleanPolicy("bool", false), + }, + { + // Test update from not enforced to enforced + Config: testAccFolderOrganizationPolicy_boolean(org, folder, true), + Check: testAccCheckGoogleFolderOrganizationBooleanPolicy("bool", true), + }, + }, + }) +} + +func TestAccFolderOrganizationPolicy_list_allowAll(t *testing.T) { + t.Parallel() + + folder := acctest.RandomWithPrefix("tf-test") + + org := getTestOrgFromEnv(t) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleFolderOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccFolderOrganizationPolicy_list_allowAll(org, folder), + Check: testAccCheckGoogleFolderOrganizationListPolicyAll("list", "ALLOW"), + }, + }, + }) +} + +func TestAccFolderOrganizationPolicy_list_allowSome(t *testing.T) { + t.Parallel() + + folder := acctest.RandomWithPrefix("tf-test") + org := getTestOrgFromEnv(t) + project := getTestProjectFromEnv() + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleFolderOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccFolderOrganizationPolicy_list_allowSome(org, folder, project), + Check: testAccCheckGoogleFolderOrganizationListPolicyAllowedValues("list", []string{"projects/" + project}), + }, + }, + }) 
+} + +func TestAccFolderOrganizationPolicy_list_denySome(t *testing.T) { + t.Parallel() + + folder := acctest.RandomWithPrefix("tf-test") + org := getTestOrgFromEnv(t) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleFolderOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccFolderOrganizationPolicy_list_denySome(org, folder), + Check: testAccCheckGoogleFolderOrganizationListPolicyDeniedValues("list", DENIED_ORG_POLICIES), + }, + }, + }) +} + +func TestAccFolderOrganizationPolicy_list_update(t *testing.T) { + t.Parallel() + + folder := acctest.RandomWithPrefix("tf-test") + org := getTestOrgFromEnv(t) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleFolderOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccFolderOrganizationPolicy_list_allowAll(org, folder), + Check: testAccCheckGoogleFolderOrganizationListPolicyAll("list", "ALLOW"), + }, + { + Config: testAccFolderOrganizationPolicy_list_denySome(org, folder), + Check: testAccCheckGoogleFolderOrganizationListPolicyDeniedValues("list", DENIED_ORG_POLICIES), + }, + }, + }) +} + +func TestAccFolderOrganizationPolicy_restore_defaultTrue(t *testing.T) { + t.Parallel() + + folder := acctest.RandomWithPrefix("tf-test") + org := getTestOrgFromEnv(t) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccFolderOrganizationPolicy_restore_defaultTrue(org, folder), + Check: getGoogleFolderOrganizationRestoreDefaultTrue("restore", &cloudresourcemanager.RestoreDefault{}), + }, + }, + }) +} + +func testAccCheckGoogleFolderOrganizationPolicyDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + 
for _, rs := range s.RootModule().Resources { + if rs.Type != "google_folder_organization_policy" { + continue + } + + folder := canonicalFolderId(rs.Primary.Attributes["folder"]) + constraint := canonicalOrgPolicyConstraint(rs.Primary.Attributes["constraint"]) + policy, err := config.clientResourceManager.Folders.GetOrgPolicy(folder, &cloudresourcemanager.GetOrgPolicyRequest{ + Constraint: constraint, + }).Do() + + if err != nil { + return err + } + + if policy.ListPolicy != nil || policy.BooleanPolicy != nil { + return fmt.Errorf("Org policy with constraint '%s' hasn't been cleared", constraint) + } + } + return nil +} + +func testAccCheckGoogleFolderOrganizationBooleanPolicy(n string, enforced bool) resource.TestCheckFunc { + return func(s *terraform.State) error { + policy, err := getGoogleFolderOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + if policy.BooleanPolicy.Enforced != enforced { + return fmt.Errorf("Expected boolean policy enforcement to be '%t', got '%t'", enforced, policy.BooleanPolicy.Enforced) + } + + return nil + } +} + +func testAccCheckGoogleFolderOrganizationListPolicyAll(n, policyType string) resource.TestCheckFunc { + return func(s *terraform.State) error { + policy, err := getGoogleFolderOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + if len(policy.ListPolicy.AllowedValues) > 0 || len(policy.ListPolicy.DeniedValues) > 0 { + return fmt.Errorf("The `values` field shouldn't be set") + } + + if policy.ListPolicy.AllValues != policyType { + return fmt.Errorf("The list policy should %s all values", policyType) + } + + return nil + } +} + +func testAccCheckGoogleFolderOrganizationListPolicyAllowedValues(n string, values []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + policy, err := getGoogleFolderOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + sort.Strings(policy.ListPolicy.AllowedValues) + sort.Strings(values) + if 
!reflect.DeepEqual(policy.ListPolicy.AllowedValues, values) { + return fmt.Errorf("Expected the list policy to allow '%s', instead allowed '%s'", values, policy.ListPolicy.AllowedValues) + } + + return nil + } +} + +func testAccCheckGoogleFolderOrganizationListPolicyDeniedValues(n string, values []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + policy, err := getGoogleFolderOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + sort.Strings(policy.ListPolicy.DeniedValues) + sort.Strings(values) + if !reflect.DeepEqual(policy.ListPolicy.DeniedValues, values) { + return fmt.Errorf("Expected the list policy to deny '%s', instead denied '%s'", values, policy.ListPolicy.DeniedValues) + } + + return nil + } +} + +func getGoogleFolderOrganizationRestoreDefaultTrue(n string, policyDefault *cloudresourcemanager.RestoreDefault) resource.TestCheckFunc { + return func(s *terraform.State) error { + + policy, err := getGoogleFolderOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + if !reflect.DeepEqual(policy.RestoreDefault, policyDefault) { + return fmt.Errorf("Expected the restore default '%s', instead denied, %s", policyDefault, policy.RestoreDefault) + } + + return nil + } +} + +func getGoogleFolderOrganizationPolicyTestResource(s *terraform.State, n string) (*cloudresourcemanager.OrgPolicy, error) { + rn := "google_folder_organization_policy." 
+ n + rs, ok := s.RootModule().Resources[rn] + if !ok { + return nil, fmt.Errorf("Not found: %s", rn) + } + + if rs.Primary.ID == "" { + return nil, fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + folder := canonicalFolderId(rs.Primary.Attributes["folder"]) + + return config.clientResourceManager.Folders.GetOrgPolicy(folder, &cloudresourcemanager.GetOrgPolicyRequest{ + Constraint: rs.Primary.Attributes["constraint"], + }).Do() +} + +func testAccFolderOrganizationPolicy_boolean(org, folder string, enforced bool) string { + return fmt.Sprintf(` +resource "google_folder" "orgpolicy" { + display_name = "%s" + parent = "%s" +} + +resource "google_folder_organization_policy" "bool" { + # Test numeric folder ID. + folder = "${replace(google_folder.orgpolicy.name, "folders/", "")}" + constraint = "constraints/compute.disableSerialPortAccess" + + boolean_policy { + enforced = %t + } +} +`, folder, "organizations/"+org, enforced) +} + +func testAccFolderOrganizationPolicy_list_allowAll(org, folder string) string { + return fmt.Sprintf(` +resource "google_folder" "orgpolicy" { + display_name = "%s" + parent = "%s" +} + +resource "google_folder_organization_policy" "list" { + folder = "${google_folder.orgpolicy.name}" + constraint = "constraints/serviceuser.services" + + list_policy { + allow { + all = true + } + } +} +`, folder, "organizations/"+org) +} + +func testAccFolderOrganizationPolicy_list_allowSome(org, folder, project string) string { + return fmt.Sprintf(` +resource "google_folder" "orgpolicy" { + display_name = "%s" + parent = "%s" +} + +resource "google_folder_organization_policy" "list" { + folder = "${google_folder.orgpolicy.name}" + constraint = "constraints/compute.trustedImageProjects" + + list_policy { + allow { + values = ["projects/%s"] + } + } +} +`, folder, "organizations/"+org, project) +} + +func testAccFolderOrganizationPolicy_list_denySome(org, folder string) string { + return fmt.Sprintf(` +resource "google_folder" 
"orgpolicy" { + display_name = "%s" + parent = "%s" +} + +resource "google_folder_organization_policy" "list" { + folder = "${google_folder.orgpolicy.name}" + constraint = "serviceuser.services" + + list_policy { + deny { + values = [ + "doubleclicksearch.googleapis.com", + "replicapoolupdater.googleapis.com", + ] + } + } +} +`, folder, "organizations/"+org) +} + +func testAccFolderOrganizationPolicy_restore_defaultTrue(org, folder string) string { + return fmt.Sprintf(` +resource "google_folder" "orgpolicy" { + display_name = "%s" + parent = "%s" +} + +resource "google_folder_organization_policy" "restore" { + folder = "${google_folder.orgpolicy.name}" + constraint = "serviceuser.services" + + restore_policy { + default = true + } +} +`, folder, "organizations/"+org) +} diff --git a/provider/terraform/tests/resource_google_folder_test.go b/provider/terraform/tests/resource_google_folder_test.go new file mode 100644 index 000000000000..410465a1ce44 --- /dev/null +++ b/provider/terraform/tests/resource_google_folder_test.go @@ -0,0 +1,169 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + + resourceManagerV2Beta1 "google.golang.org/api/cloudresourcemanager/v2beta1" +) + +func TestAccFolder_rename(t *testing.T) { + t.Parallel() + + folderDisplayName := "tf-test-" + acctest.RandString(10) + newFolderDisplayName := "tf-test-renamed-" + acctest.RandString(10) + org := getTestOrgFromEnv(t) + parent := "organizations/" + org + folder := resourceManagerV2Beta1.Folder{} + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleFolderDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccFolder_basic(folderDisplayName, parent), + Check: resource.ComposeTestCheckFunc( + 
testAccCheckGoogleFolderExists("google_folder.folder1", &folder), + testAccCheckGoogleFolderParent(&folder, parent), + testAccCheckGoogleFolderDisplayName(&folder, folderDisplayName), + ), + }, + resource.TestStep{ + Config: testAccFolder_basic(newFolderDisplayName, parent), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleFolderExists("google_folder.folder1", &folder), + testAccCheckGoogleFolderParent(&folder, parent), + testAccCheckGoogleFolderDisplayName(&folder, newFolderDisplayName), + )}, + resource.TestStep{ + ResourceName: "google_folder.folder1", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccFolder_moveParent(t *testing.T) { + t.Parallel() + + folder1DisplayName := "tf-test-" + acctest.RandString(10) + folder2DisplayName := "tf-test-" + acctest.RandString(10) + org := getTestOrgFromEnv(t) + parent := "organizations/" + org + folder1 := resourceManagerV2Beta1.Folder{} + folder2 := resourceManagerV2Beta1.Folder{} + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleFolderDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccFolder_basic(folder1DisplayName, parent), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleFolderExists("google_folder.folder1", &folder1), + testAccCheckGoogleFolderParent(&folder1, parent), + testAccCheckGoogleFolderDisplayName(&folder1, folder1DisplayName), + ), + }, + resource.TestStep{ + Config: testAccFolder_move(folder1DisplayName, folder2DisplayName, parent), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleFolderExists("google_folder.folder1", &folder1), + testAccCheckGoogleFolderDisplayName(&folder1, folder1DisplayName), + testAccCheckGoogleFolderExists("google_folder.folder2", &folder2), + testAccCheckGoogleFolderParent(&folder2, parent), + testAccCheckGoogleFolderDisplayName(&folder2, folder2DisplayName), + ), + }, + }, + }) +} + +func 
testAccCheckGoogleFolderDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_folder" { + continue + } + + folder, err := config.clientResourceManagerV2Beta1.Folders.Get(rs.Primary.ID).Do() + if err != nil || folder.LifecycleState != "DELETE_REQUESTED" { + return fmt.Errorf("Folder '%s' hasn't been marked for deletion", rs.Primary.Attributes["display_name"]) + } + } + + return nil +} + +func testAccCheckGoogleFolderExists(n string, folder *resourceManagerV2Beta1.Folder) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + + found, err := config.clientResourceManagerV2Beta1.Folders.Get(rs.Primary.ID).Do() + if err != nil { + return err + } + + *folder = *found + + return nil + } +} + +func testAccCheckGoogleFolderDisplayName(folder *resourceManagerV2Beta1.Folder, displayName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + if folder.DisplayName != displayName { + return fmt.Errorf("Incorrect display name . Expected '%s', got '%s'", displayName, folder.DisplayName) + } + return nil + } +} + +func testAccCheckGoogleFolderParent(folder *resourceManagerV2Beta1.Folder, parent string) resource.TestCheckFunc { + return func(s *terraform.State) error { + if folder.Parent != parent { + return fmt.Errorf("Incorrect parent. 
Expected '%s', got '%s'", parent, folder.Parent) + } + return nil + } +} + +func testAccFolder_basic(folder, parent string) string { + return fmt.Sprintf(` +resource "google_folder" "folder1" { + display_name = "%s" + parent = "%s" +} +`, folder, parent) +} + +func testAccFolder_move(folder1, folder2, parent string) string { + return fmt.Sprintf(` +resource "google_folder" "folder1" { + display_name = "%s" + parent = "${google_folder.folder2.name}" +} + +resource "google_folder" "folder2" { + display_name = "%s" + parent = "%s" +} +`, folder1, folder2, parent) +} diff --git a/provider/terraform/tests/resource_google_organization_iam_custom_role_test.go b/provider/terraform/tests/resource_google_organization_iam_custom_role_test.go new file mode 100644 index 000000000000..6d7fef648515 --- /dev/null +++ b/provider/terraform/tests/resource_google_organization_iam_custom_role_test.go @@ -0,0 +1,245 @@ +package google + +import ( + "fmt" + "reflect" + "sort" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccOrganizationIamCustomRole_basic(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + roleId := "tfIamCustomRole" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleOrganizationIamCustomRoleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleOrganizationIamCustomRole_basic(org, roleId), + Check: testAccCheckGoogleOrganizationIamCustomRole( + "google_organization_iam_custom_role.foo", + "My Custom Role", + "foo", + "GA", + []string{"resourcemanager.projects.list"}), + }, + { + Config: testAccCheckGoogleOrganizationIamCustomRole_update(org, roleId), + Check: testAccCheckGoogleOrganizationIamCustomRole( + "google_organization_iam_custom_role.foo", + "My Custom Role Updated", + "bar", + "BETA", + 
[]string{"resourcemanager.projects.list", "resourcemanager.organizations.get"}), + }, + { + ResourceName: "google_organization_iam_custom_role.foo", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccOrganizationIamCustomRole_undelete(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + roleId := "tfIamCustomRole" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleOrganizationIamCustomRoleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleOrganizationIamCustomRole_basic(org, roleId), + Check: testAccCheckGoogleOrganizationIamCustomRoleDeletionStatus("google_organization_iam_custom_role.foo", false), + }, + // Soft-delete + { + Config: testAccCheckGoogleOrganizationIamCustomRole_deleted(org, roleId), + Check: testAccCheckGoogleOrganizationIamCustomRoleDeletionStatus("google_organization_iam_custom_role.foo", true), + }, + // Undelete + { + Config: testAccCheckGoogleOrganizationIamCustomRole_basic(org, roleId), + Check: testAccCheckGoogleOrganizationIamCustomRoleDeletionStatus("google_organization_iam_custom_role.foo", false), + }, + }, + }) +} + +func TestAccOrganizationIamCustomRole_createAfterDestroy(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + roleId := "tfIamCustomRole" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleOrganizationIamCustomRoleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleOrganizationIamCustomRole_basic(org, roleId), + Check: testAccCheckGoogleOrganizationIamCustomRole( + "google_organization_iam_custom_role.foo", + "My Custom Role", + "foo", + "GA", + []string{"resourcemanager.projects.list"}), + }, + // Destroy resources + { + Config: " ", + Destroy: true, + }, + // Re-create with no existing state 
+ { + Config: testAccCheckGoogleOrganizationIamCustomRole_basic(org, roleId), + Check: testAccCheckGoogleOrganizationIamCustomRole( + "google_organization_iam_custom_role.foo", + "My Custom Role", + "foo", + "GA", + []string{"resourcemanager.projects.list"}), + }, + }, + }) +} + +func testAccCheckGoogleOrganizationIamCustomRoleDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_organization_iam_custom_role" { + continue + } + + role, err := config.clientIAM.Organizations.Roles.Get(rs.Primary.ID).Do() + + if err != nil { + return err + } + + if !role.Deleted { + return fmt.Errorf("Iam custom role still exists") + } + + } + + return nil +} + +func testAccCheckGoogleOrganizationIamCustomRole(n, title, description, stage string, permissions []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + role, err := config.clientIAM.Organizations.Roles.Get(rs.Primary.ID).Do() + + if err != nil { + return err + } + + if title != role.Title { + return fmt.Errorf("Incorrect title. Expected %q, got %q", title, role.Title) + } + + if description != role.Description { + return fmt.Errorf("Incorrect description. Expected %q, got %q", description, role.Description) + } + + if stage != role.Stage { + return fmt.Errorf("Incorrect stage. Expected %q, got %q", stage, role.Stage) + } + + sort.Strings(permissions) + sort.Strings(role.IncludedPermissions) + if !reflect.DeepEqual(permissions, role.IncludedPermissions) { + return fmt.Errorf("Incorrect permissions. 
Expected %q, got %q", permissions, role.IncludedPermissions) + } + + return nil + } +} + +func testAccCheckGoogleOrganizationIamCustomRoleDeletionStatus(n string, deleted bool) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + role, err := config.clientIAM.Organizations.Roles.Get(rs.Primary.ID).Do() + + if err != nil { + return err + } + + if deleted != role.Deleted { + return fmt.Errorf("Incorrect deletion status. Expected %t, got %t", deleted, role.Deleted) + } + + return nil + } +} + +func testAccCheckGoogleOrganizationIamCustomRole_basic(orgId, roleId string) string { + return fmt.Sprintf(` +resource "google_organization_iam_custom_role" "foo" { + role_id = "%s" + org_id = "%s" + title = "My Custom Role" + description = "foo" + permissions = ["resourcemanager.projects.list"] +} +`, roleId, orgId) +} + +func testAccCheckGoogleOrganizationIamCustomRole_deleted(orgId, roleId string) string { + return fmt.Sprintf(` +resource "google_organization_iam_custom_role" "foo" { + role_id = "%s" + org_id = "%s" + title = "My Custom Role" + description = "foo" + permissions = ["resourcemanager.projects.list"] + deleted = true +} +`, roleId, orgId) +} + +func testAccCheckGoogleOrganizationIamCustomRole_update(orgId, roleId string) string { + return fmt.Sprintf(` +resource "google_organization_iam_custom_role" "foo" { + role_id = "%s" + org_id = "%s" + title = "My Custom Role Updated" + description = "bar" + permissions = ["resourcemanager.projects.list", "resourcemanager.organizations.get"] + stage = "BETA" +} +`, roleId, orgId) +} diff --git a/provider/terraform/tests/resource_google_organization_iam_test.go b/provider/terraform/tests/resource_google_organization_iam_test.go new file mode 100644 index 000000000000..8b0df19e9d99 --- /dev/null +++ 
b/provider/terraform/tests/resource_google_organization_iam_test.go @@ -0,0 +1,206 @@ +package google + +import ( + "fmt" + "reflect" + "sort" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/cloudresourcemanager/v1" +) + +// Bindings and members are tested serially to avoid concurrent updates of the org's IAM policy. +// When concurrent changes happen, the behavior is to abort and ask the user to retry allowing +// them to see the new diff instead of blindly overriding the policy stored in GCP. This desired +// behavior however induces flakiness in our acceptance tests, hence the need for running them +// serially. +// Policies are *not tested*, because testing them will ruin changes made to the test org. +func TestAccOrganizationIam(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + account := acctest.RandomWithPrefix("tf-test") + roleId := "tfIamTest" + acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test Iam Binding creation + Config: testAccOrganizationIamBinding_basic(account, roleId, org), + Check: testAccCheckGoogleOrganizationIamBindingExists("foo", "test-role", []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + ResourceName: "google_organization_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s organizations/%s/roles/%s", org, org, roleId), + ImportState: true, + ImportStateVerify: true, + }, + { + // Test Iam Binding update + Config: testAccOrganizationIamBinding_update(account, roleId, org), + Check: testAccCheckGoogleOrganizationIamBindingExists("foo", "test-role", []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + 
fmt.Sprintf("serviceAccount:%s-2@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + ResourceName: "google_organization_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s organizations/%s/roles/%s", org, org, roleId), + ImportState: true, + ImportStateVerify: true, + }, + { + // Test Iam Member creation (no update for member, no need to test) + Config: testAccOrganizationIamMember_basic(account, org), + Check: testAccCheckGoogleOrganizationIamMemberExists("foo", "roles/browser", + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + ), + }, + { + ResourceName: "google_organization_iam_member.foo", + ImportStateId: fmt.Sprintf("%s roles/browser serviceAccount:%s@%s.iam.gserviceaccount.com", org, account, getTestProjectFromEnv()), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckGoogleOrganizationIamBindingExists(bindingResourceName, roleResourceName string, members []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + bindingRs, ok := s.RootModule().Resources["google_organization_iam_binding."+bindingResourceName] + if !ok { + return fmt.Errorf("Not found: %s", bindingResourceName) + } + + roleRs, ok := s.RootModule().Resources["google_organization_iam_custom_role."+roleResourceName] + if !ok { + return fmt.Errorf("Not found: %s", roleResourceName) + } + + config := testAccProvider.Meta().(*Config) + p, err := config.clientResourceManager.Organizations.GetIamPolicy("organizations/"+bindingRs.Primary.Attributes["org_id"], &cloudresourcemanager.GetIamPolicyRequest{}).Do() + if err != nil { + return err + } + + for _, binding := range p.Bindings { + if binding.Role == roleRs.Primary.ID { + sort.Strings(members) + sort.Strings(binding.Members) + + if reflect.DeepEqual(members, binding.Members) { + return nil + } + + return fmt.Errorf("Binding found but expected members is %v, got %v", members, binding.Members) + } + } + + return 
fmt.Errorf("No binding for role %q", roleRs.Primary.ID) + } +} + +func testAccCheckGoogleOrganizationIamMemberExists(n, role, member string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources["google_organization_iam_member."+n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + config := testAccProvider.Meta().(*Config) + p, err := config.clientResourceManager.Organizations.GetIamPolicy("organizations/"+rs.Primary.Attributes["org_id"], &cloudresourcemanager.GetIamPolicyRequest{}).Do() + if err != nil { + return err + } + + for _, binding := range p.Bindings { + if binding.Role == role { + for _, m := range binding.Members { + if m == member { + return nil + } + } + + return fmt.Errorf("Missing member %q, got %v", member, binding.Members) + } + } + + return fmt.Errorf("No binding for role %q", role) + } +} + +// We are using a custom role since iam_binding is authoritative on the member list and +// we want to avoid removing members from an existing role to prevent unwanted side effects. 
+func testAccOrganizationIamBinding_basic(account, role, org string) string { + return fmt.Sprintf(` +resource "google_service_account" "test-account" { + account_id = "%s" + display_name = "Iam Testing Account" +} + +resource "google_organization_iam_custom_role" "test-role" { + role_id = "%s" + org_id = "%s" + title = "Iam Testing Role" + permissions = ["genomics.datasets.get"] +} + +resource "google_organization_iam_binding" "foo" { + org_id = "%s" + role = "${google_organization_iam_custom_role.test-role.id}" + members = ["serviceAccount:${google_service_account.test-account.email}"] +} +`, account, role, org, org) +} + +func testAccOrganizationIamBinding_update(account, role, org string) string { + return fmt.Sprintf(` +resource "google_service_account" "test-account" { + account_id = "%s" + display_name = "Iam Testing Account" +} + +resource "google_organization_iam_custom_role" "test-role" { + role_id = "%s" + org_id = "%s" + title = "Iam Testing Role" + permissions = ["genomics.datasets.get"] +} + +resource "google_service_account" "test-account-2" { + account_id = "%s-2" + display_name = "Iam Testing Account" +} + +resource "google_organization_iam_binding" "foo" { + org_id = "%s" + role = "${google_organization_iam_custom_role.test-role.id}" + members = [ + "serviceAccount:${google_service_account.test-account.email}", + "serviceAccount:${google_service_account.test-account-2.email}" + ] +} +`, account, role, org, account, org) +} + +func testAccOrganizationIamMember_basic(account, org string) string { + return fmt.Sprintf(` +resource "google_service_account" "test-account" { + account_id = "%s" + display_name = "Iam Testing Account" +} + +resource "google_organization_iam_member" "foo" { + org_id = "%s" + role = "roles/browser" + member = "serviceAccount:${google_service_account.test-account.email}" +} +`, account, org) +} diff --git a/provider/terraform/tests/resource_google_organization_policy_test.go 
b/provider/terraform/tests/resource_google_organization_policy_test.go new file mode 100644 index 000000000000..b62a0bac9272 --- /dev/null +++ b/provider/terraform/tests/resource_google_organization_policy_test.go @@ -0,0 +1,391 @@ +package google + +import ( + "fmt" + "reflect" + "sort" + "testing" + + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/cloudresourcemanager/v1" +) + +var DENIED_ORG_POLICIES = []string{ + "doubleclicksearch.googleapis.com", + "replicapoolupdater.googleapis.com", +} + +// Since each test here is acting on the same organization, run the tests serially to +// avoid race conditions and aborted operations. +func TestAccOrganizationPolicy(t *testing.T) { + testCases := map[string]func(t *testing.T){ + "boolean": testAccOrganizationPolicy_boolean, + "list_allowAll": testAccOrganizationPolicy_list_allowAll, + "list_allowSome": testAccOrganizationPolicy_list_allowSome, + "list_denySome": testAccOrganizationPolicy_list_denySome, + "list_update": testAccOrganizationPolicy_list_update, + "restore_policy": testAccOrganizationPolicy_restore_defaultTrue, + } + + for name, tc := range testCases { + // shadow the tc variable into scope so that when + // the loop continues, if t.Run hasn't executed tc(t) + // yet, we don't have a race condition + // see https://github.com/golang/go/wiki/CommonMistakes#using-goroutines-on-loop-iterator-variables + tc := tc + t.Run(name, func(t *testing.T) { + tc(t) + }) + } +} + +func testAccOrganizationPolicy_boolean(t *testing.T) { + org := getTestOrgTargetFromEnv(t) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + // Test creation of an enforced boolean policy + Config: testAccOrganizationPolicyConfig_boolean(org, true), + Check: testAccCheckGoogleOrganizationBooleanPolicy("bool", true), + 
}, + { + // Test update from enforced to not + Config: testAccOrganizationPolicyConfig_boolean(org, false), + Check: testAccCheckGoogleOrganizationBooleanPolicy("bool", false), + }, + { + Config: " ", + Destroy: true, + }, + { + // Test creation of a not enforced boolean policy + Config: testAccOrganizationPolicyConfig_boolean(org, false), + Check: testAccCheckGoogleOrganizationBooleanPolicy("bool", false), + }, + { + // Test update from not enforced to enforced + Config: testAccOrganizationPolicyConfig_boolean(org, true), + Check: testAccCheckGoogleOrganizationBooleanPolicy("bool", true), + }, + { + ResourceName: "google_organization_policy.bool", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) + +} + +func testAccOrganizationPolicy_list_allowAll(t *testing.T) { + org := getTestOrgTargetFromEnv(t) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccOrganizationPolicyConfig_list_allowAll(org), + Check: testAccCheckGoogleOrganizationListPolicyAll("list", "ALLOW"), + }, + { + ResourceName: "google_organization_policy.list", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccOrganizationPolicy_list_allowSome(t *testing.T) { + org := getTestOrgTargetFromEnv(t) + project := getTestProjectFromEnv() + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccOrganizationPolicyConfig_list_allowSome(org, project), + Check: testAccCheckGoogleOrganizationListPolicyAllowedValues("list", []string{"projects/" + project, "projects/debian-cloud"}), + }, + { + ResourceName: "google_organization_policy.list", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func 
testAccOrganizationPolicy_list_denySome(t *testing.T) { + org := getTestOrgTargetFromEnv(t) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccOrganizationPolicyConfig_list_denySome(org), + Check: testAccCheckGoogleOrganizationListPolicyDeniedValues("list", DENIED_ORG_POLICIES), + }, + { + ResourceName: "google_organization_policy.list", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccOrganizationPolicy_list_update(t *testing.T) { + org := getTestOrgTargetFromEnv(t) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccOrganizationPolicyConfig_list_allowAll(org), + Check: testAccCheckGoogleOrganizationListPolicyAll("list", "ALLOW"), + }, + { + Config: testAccOrganizationPolicyConfig_list_denySome(org), + Check: testAccCheckGoogleOrganizationListPolicyDeniedValues("list", DENIED_ORG_POLICIES), + }, + { + ResourceName: "google_organization_policy.list", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccOrganizationPolicy_restore_defaultTrue(t *testing.T) { + org := getTestOrgTargetFromEnv(t) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccOrganizationPolicyConfig_restore_defaultTrue(org), + Check: testAccCheckGoogleOrganizationRestoreDefaultTrue("restore", &cloudresourcemanager.RestoreDefault{}), + }, + { + ResourceName: "google_organization_policy.restore", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckGoogleOrganizationPolicyDestroy(s *terraform.State) 
error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_organization_policy" { + continue + } + + org := "organizations/" + rs.Primary.Attributes["org_id"] + constraint := canonicalOrgPolicyConstraint(rs.Primary.Attributes["constraint"]) + policy, err := config.clientResourceManager.Organizations.GetOrgPolicy(org, &cloudresourcemanager.GetOrgPolicyRequest{ + Constraint: constraint, + }).Do() + + if err != nil { + return err + } + + if policy.ListPolicy != nil || policy.BooleanPolicy != nil { + return fmt.Errorf("Org policy with constraint '%s' hasn't been cleared", constraint) + } + } + return nil +} + +func testAccCheckGoogleOrganizationBooleanPolicy(n string, enforced bool) resource.TestCheckFunc { + return func(s *terraform.State) error { + policy, err := getGoogleOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + if policy.BooleanPolicy.Enforced != enforced { + return fmt.Errorf("Expected boolean policy enforcement to be '%t', got '%t'", enforced, policy.BooleanPolicy.Enforced) + } + + return nil + } +} + +func testAccCheckGoogleOrganizationListPolicyAll(n, policyType string) resource.TestCheckFunc { + return func(s *terraform.State) error { + policy, err := getGoogleOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + if len(policy.ListPolicy.AllowedValues) > 0 || len(policy.ListPolicy.DeniedValues) > 0 { + return fmt.Errorf("The `values` field shouldn't be set") + } + + if policy.ListPolicy.AllValues != policyType { + return fmt.Errorf("Expected the list policy to '%s' all values, got '%s'", policyType, policy.ListPolicy.AllValues) + } + + return nil + } +} + +func testAccCheckGoogleOrganizationListPolicyAllowedValues(n string, values []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + policy, err := getGoogleOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + 
sort.Strings(policy.ListPolicy.AllowedValues) + sort.Strings(values) + if !reflect.DeepEqual(policy.ListPolicy.AllowedValues, values) { + return fmt.Errorf("Expected the list policy to allow '%s', instead allowed '%s'", values, policy.ListPolicy.AllowedValues) + } + + return nil + } +} + +func testAccCheckGoogleOrganizationListPolicyDeniedValues(n string, values []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + policy, err := getGoogleOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + sort.Strings(policy.ListPolicy.DeniedValues) + sort.Strings(values) + if !reflect.DeepEqual(policy.ListPolicy.DeniedValues, values) { + return fmt.Errorf("Expected the list policy to deny '%s', instead denied '%s'", values, policy.ListPolicy.DeniedValues) + } + + return nil + } +} + +func testAccCheckGoogleOrganizationRestoreDefaultTrue(n string, policyDefault *cloudresourcemanager.RestoreDefault) resource.TestCheckFunc { + return func(s *terraform.State) error { + + policy, err := getGoogleOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + if !reflect.DeepEqual(policy.RestoreDefault, policyDefault) { + return fmt.Errorf("Expected the restore default '%s', instead denied, %s", policyDefault, policy.RestoreDefault) + } + + return nil + } +} + +func getGoogleOrganizationPolicyTestResource(s *terraform.State, n string) (*cloudresourcemanager.OrgPolicy, error) { + rn := "google_organization_policy." 
+ n + rs, ok := s.RootModule().Resources[rn] + if !ok { + return nil, fmt.Errorf("Not found: %s", rn) + } + + if rs.Primary.ID == "" { + return nil, fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + + return config.clientResourceManager.Organizations.GetOrgPolicy("organizations/"+rs.Primary.Attributes["org_id"], &cloudresourcemanager.GetOrgPolicyRequest{ + Constraint: rs.Primary.Attributes["constraint"], + }).Do() +} + +func testAccOrganizationPolicyConfig_boolean(org string, enforced bool) string { + return fmt.Sprintf(` +resource "google_organization_policy" "bool" { + org_id = "%s" + constraint = "constraints/compute.disableSerialPortAccess" + + boolean_policy { + enforced = %t + } +} +`, org, enforced) +} + +func testAccOrganizationPolicyConfig_list_allowAll(org string) string { + return fmt.Sprintf(` +resource "google_organization_policy" "list" { + org_id = "%s" + constraint = "constraints/serviceuser.services" + + list_policy { + allow { + all = true + } + } +} +`, org) +} + +func testAccOrganizationPolicyConfig_list_allowSome(org, project string) string { + return fmt.Sprintf(` +resource "google_organization_policy" "list" { + org_id = "%s" + constraint = "constraints/compute.trustedImageProjects" + + list_policy { + allow { + values = [ + "projects/%s", + "projects/debian-cloud" + ] + } + } +} +`, org, project) +} + +func testAccOrganizationPolicyConfig_list_denySome(org string) string { + return fmt.Sprintf(` +resource "google_organization_policy" "list" { + org_id = "%s" + constraint = "serviceuser.services" + + list_policy { + deny { + values = [ + "doubleclicksearch.googleapis.com", + "replicapoolupdater.googleapis.com", + ] + } + } +} +`, org) +} + +func testAccOrganizationPolicyConfig_restore_defaultTrue(org string) string { + return fmt.Sprintf(` +resource "google_organization_policy" "restore" { + org_id = "%s" + constraint = "serviceuser.services" + + restore_policy { + default = true + } +} +`, org) +} diff --git 
a/provider/terraform/tests/resource_google_project_iam_binding_test.go b/provider/terraform/tests/resource_google_project_iam_binding_test.go new file mode 100644 index 000000000000..1bdbf1448306 --- /dev/null +++ b/provider/terraform/tests/resource_google_project_iam_binding_test.go @@ -0,0 +1,257 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" +) + +func projectIamBindingImportStep(resourceName, pid, role string) resource.TestStep { + return resource.TestStep{ + ResourceName: resourceName, + ImportStateId: fmt.Sprintf("%s %s", pid, role), + ImportState: true, + ImportStateVerify: true, + } +} + +// Test that an IAM binding can be applied to a project +func TestAccProjectIamBinding_basic(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + role := "roles/compute.instanceAdmin" + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project + { + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(pid), + ), + }, + // Apply an IAM binding + { + Config: testAccProjectAssociateBindingBasic(pid, pname, org, role), + }, + projectIamBindingImportStep("google_project_iam_binding.acceptance", pid, role), + }, + }) +} + +// Test that multiple IAM bindings can be applied to a project, one at a time +func TestAccProjectIamBinding_multiple(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + role := "roles/compute.instanceAdmin" + role2 := "roles/viewer" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project + { + Config: testAccProject_create(pid, pname, org), + Check: 
resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(pid), + ), + }, + // Apply an IAM binding + { + Config: testAccProjectAssociateBindingBasic(pid, pname, org, role), + }, + // Apply another IAM binding + { + Config: testAccProjectAssociateBindingMultiple(pid, pname, org, role, role2), + }, + projectIamBindingImportStep("google_project_iam_binding.acceptance", pid, role), + projectIamBindingImportStep("google_project_iam_binding.multiple", pid, role2), + }, + }) +} + +// Test that multiple IAM bindings can be applied to a project all at once +func TestAccProjectIamBinding_multipleAtOnce(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + role := "roles/compute.instanceAdmin" + role2 := "roles/viewer" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project + { + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(pid), + ), + }, + // Apply an IAM binding + { + Config: testAccProjectAssociateBindingMultiple(pid, pname, org, role, role2), + }, + projectIamBindingImportStep("google_project_iam_binding.acceptance", pid, role), + projectIamBindingImportStep("google_project_iam_binding.multiple", pid, role2), + }, + }) +} + +// Test that an IAM binding can be updated once applied to a project +func TestAccProjectIamBinding_update(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + role := "roles/compute.instanceAdmin" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project + { + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(pid), + ), + }, + // Apply an IAM binding + { + Config: 
testAccProjectAssociateBindingBasic(pid, pname, org, role), + }, + projectIamBindingImportStep("google_project_iam_binding.acceptance", pid, role), + + // Apply an updated IAM binding + { + Config: testAccProjectAssociateBindingUpdated(pid, pname, org, role), + }, + projectIamBindingImportStep("google_project_iam_binding.acceptance", pid, role), + + // Drop the original member + { + Config: testAccProjectAssociateBindingDropMemberFromBasic(pid, pname, org, role), + }, + projectIamBindingImportStep("google_project_iam_binding.acceptance", pid, role), + }, + }) +} + +// Test that an IAM binding can be removed from a project +func TestAccProjectIamBinding_remove(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + role := "roles/compute.instanceAdmin" + role2 := "roles/viewer" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project + { + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(pid), + ), + }, + // Apply multiple IAM bindings + { + Config: testAccProjectAssociateBindingMultiple(pid, pname, org, role, role2), + }, + projectIamBindingImportStep("google_project_iam_binding.acceptance", pid, role), + projectIamBindingImportStep("google_project_iam_binding.multiple", pid, role2), + + // Remove the bindings + { + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(pid), + ), + }, + }, + }) +} + +func testAccProjectAssociateBindingBasic(pid, name, org, role string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +resource "google_project_iam_binding" "acceptance" { + project = "${google_project.acceptance.project_id}" + members = ["user:admin@hashicorptest.com"] + role = "%s" +} +`, 
pid, name, org, role) +} + +func testAccProjectAssociateBindingMultiple(pid, name, org, role, role2 string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +resource "google_project_iam_binding" "acceptance" { + project = "${google_project.acceptance.project_id}" + members = ["user:admin@hashicorptest.com"] + role = "%s" +} + +resource "google_project_iam_binding" "multiple" { + project = "${google_project.acceptance.project_id}" + members = ["user:paddy@hashicorp.com"] + role = "%s" +} +`, pid, name, org, role, role2) +} + +func testAccProjectAssociateBindingUpdated(pid, name, org, role string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +resource "google_project_iam_binding" "acceptance" { + project = "${google_project.acceptance.project_id}" + members = ["user:admin@hashicorptest.com", "user:paddy@hashicorp.com"] + role = "%s" +} +`, pid, name, org, role) +} + +func testAccProjectAssociateBindingDropMemberFromBasic(pid, name, org, role string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +resource "google_project_iam_binding" "acceptance" { + project = "${google_project.acceptance.project_id}" + members = ["user:paddy@hashicorp.com"] + role = "%s" +} +`, pid, name, org, role) +} diff --git a/provider/terraform/tests/resource_google_project_iam_custom_role_test.go b/provider/terraform/tests/resource_google_project_iam_custom_role_test.go new file mode 100644 index 000000000000..f0e396ca0a30 --- /dev/null +++ b/provider/terraform/tests/resource_google_project_iam_custom_role_test.go @@ -0,0 +1,177 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func 
TestAccProjectIamCustomRole_basic(t *testing.T) { + t.Parallel() + + roleId := "tfIamCustomRole" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleProjectIamCustomRoleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleProjectIamCustomRole_basic(roleId), + Check: resource.TestCheckResourceAttr("google_project_iam_custom_role.foo", "stage", "GA"), + }, + { + ResourceName: "google_project_iam_custom_role.foo", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccCheckGoogleProjectIamCustomRole_update(roleId), + }, + { + ResourceName: "google_project_iam_custom_role.foo", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccProjectIamCustomRole_undelete(t *testing.T) { + t.Parallel() + + roleId := "tfIamCustomRole" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleProjectIamCustomRoleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleProjectIamCustomRole_basic(roleId), + Check: resource.TestCheckResourceAttr("google_project_iam_custom_role.foo", "deleted", "false"), + }, + { + ResourceName: "google_project_iam_custom_role.foo", + ImportState: true, + ImportStateVerify: true, + }, + // Soft-delete + { + Config: testAccCheckGoogleProjectIamCustomRole_deleted(roleId), + Check: resource.TestCheckResourceAttr("google_project_iam_custom_role.foo", "deleted", "true"), + }, + { + ResourceName: "google_project_iam_custom_role.foo", + ImportState: true, + ImportStateVerify: true, + }, + // Undelete + { + Config: testAccCheckGoogleProjectIamCustomRole_basic(roleId), + Check: resource.TestCheckResourceAttr("google_project_iam_custom_role.foo", "deleted", "false"), + }, + { + ResourceName: "google_project_iam_custom_role.foo", + ImportState: true, + 
ImportStateVerify: true, + }, + }, + }) +} + +func TestAccProjectIamCustomRole_createAfterDestroy(t *testing.T) { + t.Parallel() + + roleId := "tfIamCustomRole" + acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleProjectIamCustomRoleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckGoogleProjectIamCustomRole_basic(roleId), + }, + { + ResourceName: "google_project_iam_custom_role.foo", + ImportState: true, + ImportStateVerify: true, + }, + // Destroy resources + { + Config: " ", + Destroy: true, + }, + // Re-create with no existing state + { + Config: testAccCheckGoogleProjectIamCustomRole_basic(roleId), + }, + { + ResourceName: "google_project_iam_custom_role.foo", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckGoogleProjectIamCustomRoleDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_project_iam_custom_role" { + continue + } + + role, err := config.clientIAM.Projects.Roles.Get(rs.Primary.ID).Do() + + if err != nil { + return err + } + + if !role.Deleted { + return fmt.Errorf("Iam custom role still exists") + } + + } + + return nil +} + +func testAccCheckGoogleProjectIamCustomRole_basic(roleId string) string { + return fmt.Sprintf(` +resource "google_project_iam_custom_role" "foo" { + role_id = "%s" + title = "My Custom Role" + description = "foo" + permissions = ["iam.roles.list"] +} +`, roleId) +} + +func testAccCheckGoogleProjectIamCustomRole_deleted(roleId string) string { + return fmt.Sprintf(` +resource "google_project_iam_custom_role" "foo" { + role_id = "%s" + title = "My Custom Role" + description = "foo" + permissions = ["iam.roles.list"] + deleted = true +} +`, roleId) +} + +func testAccCheckGoogleProjectIamCustomRole_update(roleId string) string { + return fmt.Sprintf(` +resource 
"google_project_iam_custom_role" "foo" { + role_id = "%s" + title = "My Custom Role Updated" + description = "bar" + permissions = ["iam.roles.list", "iam.roles.create", "iam.roles.delete"] + stage = "BETA" +} +`, roleId) +} diff --git a/provider/terraform/tests/resource_google_project_iam_member_test.go b/provider/terraform/tests/resource_google_project_iam_member_test.go new file mode 100644 index 000000000000..df280216ad25 --- /dev/null +++ b/provider/terraform/tests/resource_google_project_iam_member_test.go @@ -0,0 +1,169 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" +) + +func projectIamMemberImportStep(resourceName, pid, role, member string) resource.TestStep { + return resource.TestStep{ + ResourceName: resourceName, + ImportStateId: fmt.Sprintf("%s %s %s", pid, role, member), + ImportState: true, + ImportStateVerify: true, + } +} + +// Test that an IAM binding can be applied to a project +func TestAccProjectIamMember_basic(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + resourceName := "google_project_iam_member.acceptance" + role := "roles/compute.instanceAdmin" + member := "user:admin@hashicorptest.com" + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project + { + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(pid), + ), + }, + // Apply an IAM binding + { + Config: testAccProjectAssociateMemberBasic(pid, pname, org, role, member), + }, + projectIamMemberImportStep(resourceName, pid, role, member), + }, + }) +} + +// Test that multiple IAM bindings can be applied to a project +func TestAccProjectIamMember_multiple(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + skipIfEnvNotSet(t, 
"GOOGLE_ORG") + + pid := "terraform-" + acctest.RandString(10) + resourceName := "google_project_iam_member.acceptance" + resourceName2 := "google_project_iam_member.multiple" + role := "roles/compute.instanceAdmin" + member := "user:admin@hashicorptest.com" + member2 := "user:paddy@hashicorp.com" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project + { + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(pid), + ), + }, + // Apply an IAM binding + { + Config: testAccProjectAssociateMemberBasic(pid, pname, org, role, member), + }, + projectIamMemberImportStep(resourceName, pid, role, member), + + // Apply another IAM binding + { + Config: testAccProjectAssociateMemberMultiple(pid, pname, org, role, member, role, member2), + }, + projectIamMemberImportStep(resourceName, pid, role, member), + projectIamMemberImportStep(resourceName2, pid, role, member2), + }, + }) +} + +// Test that an IAM binding can be removed from a project +func TestAccProjectIamMember_remove(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + skipIfEnvNotSet(t, "GOOGLE_ORG") + + pid := "terraform-" + acctest.RandString(10) + resourceName := "google_project_iam_member.acceptance" + role := "roles/compute.instanceAdmin" + member := "user:admin@hashicorptest.com" + member2 := "user:paddy@hashicorp.com" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project + { + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(pid), + ), + }, + + // Apply multiple IAM bindings + { + Config: testAccProjectAssociateMemberMultiple(pid, pname, org, role, member, role, member2), + }, + projectIamMemberImportStep(resourceName, pid, role, 
member), + projectIamMemberImportStep(resourceName, pid, role, member2), + + // Remove the bindings + { + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(pid), + ), + }, + }, + }) +} + +func testAccProjectAssociateMemberBasic(pid, name, org, role, member string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +resource "google_project_iam_member" "acceptance" { + project = "${google_project.acceptance.project_id}" + role = "%s" + member = "%s" +} +`, pid, name, org, role, member) +} + +func testAccProjectAssociateMemberMultiple(pid, name, org, role, member, role2, member2 string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +resource "google_project_iam_member" "acceptance" { + project = "${google_project.acceptance.project_id}" + role = "%s" + member = "%s" +} + +resource "google_project_iam_member" "multiple" { + project = "${google_project.acceptance.project_id}" + role = "%s" + member = "%s" +} +`, pid, name, org, role, member, role2, member2) +} diff --git a/provider/terraform/tests/resource_google_project_iam_policy_test.go b/provider/terraform/tests/resource_google_project_iam_policy_test.go new file mode 100644 index 000000000000..a09460eda0ab --- /dev/null +++ b/provider/terraform/tests/resource_google_project_iam_policy_test.go @@ -0,0 +1,780 @@ +package google + +import ( + "encoding/json" + "fmt" + "reflect" + "sort" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/cloudresourcemanager/v1" +) + +func TestSubtractIamPolicy(t *testing.T) { + table := []struct { + a *cloudresourcemanager.Policy + b *cloudresourcemanager.Policy + expect cloudresourcemanager.Policy + }{ + { + a: 
&cloudresourcemanager.Policy{ + Bindings: []*cloudresourcemanager.Binding{ + { + Role: "a", + Members: []string{ + "1", + "2", + }, + }, + { + Role: "b", + Members: []string{ + "1", + "2", + }, + }, + }, + }, + b: &cloudresourcemanager.Policy{ + Bindings: []*cloudresourcemanager.Binding{ + { + Role: "a", + Members: []string{ + "3", + "4", + }, + }, + { + Role: "b", + Members: []string{ + "1", + "2", + }, + }, + }, + }, + expect: cloudresourcemanager.Policy{ + Bindings: []*cloudresourcemanager.Binding{ + { + Role: "a", + Members: []string{ + "1", + "2", + }, + }, + }, + }, + }, + { + a: &cloudresourcemanager.Policy{ + Bindings: []*cloudresourcemanager.Binding{ + { + Role: "a", + Members: []string{ + "1", + "2", + }, + }, + { + Role: "b", + Members: []string{ + "1", + "2", + }, + }, + }, + }, + b: &cloudresourcemanager.Policy{ + Bindings: []*cloudresourcemanager.Binding{ + { + Role: "a", + Members: []string{ + "1", + "2", + }, + }, + { + Role: "b", + Members: []string{ + "1", + "2", + }, + }, + }, + }, + expect: cloudresourcemanager.Policy{ + Bindings: []*cloudresourcemanager.Binding{}, + }, + }, + { + a: &cloudresourcemanager.Policy{ + Bindings: []*cloudresourcemanager.Binding{ + { + Role: "a", + Members: []string{ + "1", + "2", + "3", + }, + }, + { + Role: "b", + Members: []string{ + "1", + "2", + "3", + }, + }, + }, + }, + b: &cloudresourcemanager.Policy{ + Bindings: []*cloudresourcemanager.Binding{ + { + Role: "a", + Members: []string{ + "1", + "3", + }, + }, + { + Role: "b", + Members: []string{ + "1", + "2", + "3", + }, + }, + }, + }, + expect: cloudresourcemanager.Policy{ + Bindings: []*cloudresourcemanager.Binding{ + { + Role: "a", + Members: []string{ + "2", + }, + }, + }, + }, + }, + { + a: &cloudresourcemanager.Policy{ + Bindings: []*cloudresourcemanager.Binding{ + { + Role: "a", + Members: []string{ + "1", + "2", + "3", + }, + }, + { + Role: "b", + Members: []string{ + "1", + "2", + "3", + }, + }, + }, + }, + b: &cloudresourcemanager.Policy{ + Bindings: 
[]*cloudresourcemanager.Binding{ + { + Role: "a", + Members: []string{ + "1", + "2", + "3", + }, + }, + { + Role: "b", + Members: []string{ + "1", + "2", + "3", + }, + }, + }, + }, + expect: cloudresourcemanager.Policy{ + Bindings: []*cloudresourcemanager.Binding{}, + }, + }, + } + + for _, test := range table { + c := subtractIamPolicy(test.a, test.b) + sort.Sort(sortableBindings(c.Bindings)) + for i, _ := range c.Bindings { + sort.Strings(c.Bindings[i].Members) + } + + if !reflect.DeepEqual(derefBindings(c.Bindings), derefBindings(test.expect.Bindings)) { + t.Errorf("\ngot %+v\nexpected %+v", derefBindings(c.Bindings), derefBindings(test.expect.Bindings)) + } + } +} + +// Test that an IAM policy can be applied to a project +func TestAccProjectIamPolicy_basic(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project + resource.TestStep{ + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(pid), + ), + }, + // Apply an IAM policy from a data source. The application + // merges policies, so we validate the expected state. + resource.TestStep{ + Config: testAccProjectAssociatePolicyBasic(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectIamPolicyIsMerged("google_project_iam_policy.acceptance", "data.google_iam_policy.admin", pid), + ), + }, + resource.TestStep{ + ResourceName: "google_project_iam_policy.acceptance", + ImportState: true, + // Skipping the normal "ImportStateVerify" - Unfortunately, it's not + // really possible to make the imported policy match exactly, since + // the policy depends on the service account being used to create the + // project. 
+ }, + // Finally, remove the custom IAM policy from config and apply, then + // confirm that the project is in its original state. + resource.TestStep{ + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(pid), + ), + }, + }, + }) +} + +// Test that an IAM policy can be applied to a project when no project is set in the resource +func TestAccProjectIamPolicy_defaultProject(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project + resource.TestStep{ + Config: testAccProjectDefaultAssociatePolicyBasic(), + Check: resource.ComposeTestCheckFunc( + testAccProjectExistingPolicy(getTestProjectFromEnv()), + ), + }, + // Apply an IAM policy from a data source. The application + // merges policies, so we validate the expected state. + resource.TestStep{ + Config: testAccProjectDefaultAssociatePolicyBasic(), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectIamPolicyIsMerged("google_project_iam_policy.acceptance", "data.google_iam_policy.admin", getTestProjectFromEnv()), + ), + }, + }, + }) +} + +// Test that a non-collapsed IAM policy doesn't perpetually diff +func TestAccProjectIamPolicy_expanded(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccProjectAssociatePolicyExpanded(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectIamPolicyExists("google_project_iam_policy.acceptance", "data.google_iam_policy.expanded", pid), + ), + }, + }, + }) +} + +func getStatePrimaryResource(s *terraform.State, res, expectedID string) (*terraform.InstanceState, error) { + // Get the project 
resource + resource, ok := s.RootModule().Resources[res] + if !ok { + return nil, fmt.Errorf("Not found: %s", res) + } + if resource.Primary.Attributes["id"] != expectedID && expectedID != "" { + return nil, fmt.Errorf("Expected project %q to match ID %q in state", resource.Primary.ID, expectedID) + } + return resource.Primary, nil +} + +func getGoogleProjectIamPolicyFromResource(resource *terraform.InstanceState) (cloudresourcemanager.Policy, error) { + var p cloudresourcemanager.Policy + ps, ok := resource.Attributes["policy_data"] + if !ok { + return p, fmt.Errorf("Resource %q did not have a 'policy_data' attribute. Attributes were %#v", resource.ID, resource.Attributes) + } + if err := json.Unmarshal([]byte(ps), &p); err != nil { + return p, fmt.Errorf("Could not unmarshal %s:\n: %v", ps, err) + } + return p, nil +} + +func getGoogleProjectIamPolicyFromState(s *terraform.State, res, expectedID string) (cloudresourcemanager.Policy, error) { + project, err := getStatePrimaryResource(s, res, expectedID) + if err != nil { + return cloudresourcemanager.Policy{}, err + } + return getGoogleProjectIamPolicyFromResource(project) +} + +func compareBindings(a, b []*cloudresourcemanager.Binding) bool { + a = mergeBindings(a) + b = mergeBindings(b) + sort.Sort(sortableBindings(a)) + sort.Sort(sortableBindings(b)) + return reflect.DeepEqual(derefBindings(a), derefBindings(b)) +} + +func testAccCheckGoogleProjectIamPolicyExists(projectRes, policyRes, pid string) resource.TestCheckFunc { + return func(s *terraform.State) error { + projectPolicy, err := getGoogleProjectIamPolicyFromState(s, projectRes, pid) + if err != nil { + return fmt.Errorf("Error retrieving IAM policy for project from state: %s", err) + } + policyPolicy, err := getGoogleProjectIamPolicyFromState(s, policyRes, "") + if err != nil { + return fmt.Errorf("Error retrieving IAM policy for data_policy from state: %s", err) + } + + // The bindings in both policies should be identical + if 
!compareBindings(projectPolicy.Bindings, policyPolicy.Bindings) { + return fmt.Errorf("Project and data source policies do not match: project policy is %+v, data resource policy is %+v", derefBindings(projectPolicy.Bindings), derefBindings(policyPolicy.Bindings)) + } + return nil + } +} + +func testAccCheckGoogleProjectIamPolicyIsMerged(projectRes, policyRes, pid string) resource.TestCheckFunc { + return func(s *terraform.State) error { + err := testAccCheckGoogleProjectIamPolicyExists(projectRes, policyRes, pid)(s) + if err != nil { + return err + } + + projectPolicy, err := getGoogleProjectIamPolicyFromState(s, projectRes, pid) + if err != nil { + return fmt.Errorf("Error retrieving IAM policy for project from state: %s", err) + } + + // Merge the project policy in Terraform state with the policy the project had before the config was applied + var expected []*cloudresourcemanager.Binding + expected = append(expected, originalPolicy.Bindings...) + expected = append(expected, projectPolicy.Bindings...) 
+ expected = mergeBindings(expected) + + // Retrieve the actual policy from the project + c := testAccProvider.Meta().(*Config) + actual, err := getProjectIamPolicy(pid, c) + if err != nil { + return fmt.Errorf("Failed to retrieve IAM Policy for project %q: %s", pid, err) + } + // The bindings should match, indicating the policy was successfully applied and merged + if !compareBindings(actual.Bindings, expected) { + return fmt.Errorf("Actual and expected project policies do not match: actual policy is %+v, expected policy is %+v", derefBindings(actual.Bindings), derefBindings(expected)) + } + + return nil + } +} + +func TestIamRolesToMembersBinding(t *testing.T) { + table := []struct { + expect []*cloudresourcemanager.Binding + input map[string]map[string]bool + }{ + { + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{ + "member-1", + "member-2", + }, + }, + }, + input: map[string]map[string]bool{ + "role-1": map[string]bool{ + "member-1": true, + "member-2": true, + }, + }, + }, + { + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{ + "member-1", + "member-2", + }, + }, + }, + input: map[string]map[string]bool{ + "role-1": map[string]bool{ + "member-1": true, + "member-2": true, + }, + }, + }, + { + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{}, + }, + }, + input: map[string]map[string]bool{ + "role-1": map[string]bool{}, + }, + }, + } + + for _, test := range table { + got := rolesToMembersBinding(test.input) + + sort.Sort(sortableBindings(got)) + for i, _ := range got { + sort.Strings(got[i].Members) + } + + if !reflect.DeepEqual(derefBindings(got), derefBindings(test.expect)) { + t.Errorf("got %+v, expected %+v", derefBindings(got), derefBindings(test.expect)) + } + } +} +func TestIamRolesToMembersMap(t *testing.T) { + table := []struct { + input []*cloudresourcemanager.Binding + expect map[string]map[string]bool + }{ + { + input: 
[]*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{ + "member-1", + "member-2", + }, + }, + }, + expect: map[string]map[string]bool{ + "role-1": map[string]bool{ + "member-1": true, + "member-2": true, + }, + }, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{ + "member-1", + "member-2", + "member-1", + "member-2", + }, + }, + }, + expect: map[string]map[string]bool{ + "role-1": map[string]bool{ + "member-1": true, + "member-2": true, + }, + }, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + }, + }, + expect: map[string]map[string]bool{ + "role-1": map[string]bool{}, + }, + }, + } + + for _, test := range table { + got := rolesToMembersMap(test.input) + if !reflect.DeepEqual(got, test.expect) { + t.Errorf("got %+v, expected %+v", got, test.expect) + } + } +} + +func TestIamMergeBindings(t *testing.T) { + table := []struct { + input []*cloudresourcemanager.Binding + expect []cloudresourcemanager.Binding + }{ + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{ + "member-1", + "member-2", + }, + }, + { + Role: "role-1", + Members: []string{ + "member-3", + }, + }, + }, + expect: []cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{ + "member-1", + "member-2", + "member-3", + }, + }, + }, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{ + "member-3", + "member-4", + }, + }, + { + Role: "role-1", + Members: []string{ + "member-2", + "member-1", + }, + }, + { + Role: "role-2", + Members: []string{ + "member-1", + }, + }, + { + Role: "role-1", + Members: []string{ + "member-5", + }, + }, + { + Role: "role-3", + Members: []string{ + "member-1", + }, + }, + { + Role: "role-2", + Members: []string{ + "member-2", + }, + }, + {Role: "empty-role", Members: []string{}}, + }, + expect: []cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{ + "member-1", + 
"member-2", + "member-3", + "member-4", + "member-5", + }, + }, + { + Role: "role-2", + Members: []string{ + "member-1", + "member-2", + }, + }, + { + Role: "role-3", + Members: []string{ + "member-1", + }, + }, + }, + }, + } + + for _, test := range table { + got := mergeBindings(test.input) + sort.Sort(sortableBindings(got)) + for i, _ := range got { + sort.Strings(got[i].Members) + } + + if !reflect.DeepEqual(derefBindings(got), test.expect) { + t.Errorf("\ngot %+v\nexpected %+v", derefBindings(got), test.expect) + } + } +} + +func derefBindings(b []*cloudresourcemanager.Binding) []cloudresourcemanager.Binding { + db := make([]cloudresourcemanager.Binding, len(b)) + + for i, v := range b { + db[i] = *v + sort.Strings(db[i].Members) + } + return db +} + +// Confirm that a project has an IAM policy with at least 1 binding +func testAccProjectExistingPolicy(pid string) resource.TestCheckFunc { + return func(s *terraform.State) error { + c := testAccProvider.Meta().(*Config) + var err error + originalPolicy, err = getProjectIamPolicy(pid, c) + if err != nil { + return fmt.Errorf("Failed to retrieve IAM Policy for project %q: %s", pid, err) + } + if len(originalPolicy.Bindings) == 0 { + return fmt.Errorf("Refuse to run test against project with zero IAM Bindings. 
This is likely an error in the test code that is not properly identifying the IAM policy of a project.") + } + return nil + } +} + +func testAccProjectDefaultAssociatePolicyBasic() string { + return fmt.Sprintf(` +resource "google_project_iam_policy" "acceptance" { + policy_data = "${data.google_iam_policy.admin.policy_data}" +} +data "google_iam_policy" "admin" { + binding { + role = "roles/storage.objectViewer" + members = [ + "user:evanbrown@google.com", + ] + } + binding { + role = "roles/compute.instanceAdmin" + members = [ + "user:evanbrown@google.com", + "user:evandbrown@gmail.com", + ] + } +} +`) +} + +func testAccProjectAssociatePolicyBasic(pid, name, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} +resource "google_project_iam_policy" "acceptance" { + project = "${google_project.acceptance.id}" + policy_data = "${data.google_iam_policy.admin.policy_data}" +} +data "google_iam_policy" "admin" { + binding { + role = "roles/storage.objectViewer" + members = [ + "user:evanbrown@google.com", + ] + } + binding { + role = "roles/compute.instanceAdmin" + members = [ + "user:evanbrown@google.com", + "user:evandbrown@gmail.com", + ] + } +} +`, pid, name, org) +} + +func testAccProject_createWithoutOrg(pid, name string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" +}`, pid, name) +} + +func testAccProject_create(pid, name, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +}`, pid, name, org) +} + +func testAccProject_createBilling(pid, name, org, billing string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" + billing_account = "%s" +}`, pid, name, org, billing) +} + +func testAccProjectAssociatePolicyExpanded(pid, name, org string) string { + return 
fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} +resource "google_project_iam_policy" "acceptance" { + project = "${google_project.acceptance.id}" + policy_data = "${data.google_iam_policy.expanded.policy_data}" + authoritative = false +} +data "google_iam_policy" "expanded" { + binding { + role = "roles/viewer" + members = [ + "user:paddy@carvers.co", + ] + } + + binding { + role = "roles/viewer" + members = [ + "user:paddy@hashicorp.com", + ] + } +}`, pid, name, org) +} diff --git a/provider/terraform/tests/resource_google_project_migrate_test.go b/provider/terraform/tests/resource_google_project_migrate_test.go new file mode 100644 index 000000000000..8aeff36404f0 --- /dev/null +++ b/provider/terraform/tests/resource_google_project_migrate_test.go @@ -0,0 +1,70 @@ +package google + +import ( + "testing" + + "github.com/hashicorp/terraform/terraform" +) + +func TestGoogleProjectMigrateState(t *testing.T) { + cases := map[string]struct { + StateVersion int + Attributes map[string]string + Expected map[string]string + Meta interface{} + }{ + "deprecate policy_data and support creation/deletion": { + StateVersion: 0, + Attributes: map[string]string{}, + Expected: map[string]string{ + "project_id": "test-project", + "skip_delete": "true", + }, + Meta: &Config{}, + }, + } + + for tn, tc := range cases { + is := &terraform.InstanceState{ + ID: "test-project", + Attributes: tc.Attributes, + } + is, err := resourceGoogleProjectMigrateState( + tc.StateVersion, is, tc.Meta) + + if err != nil { + t.Fatalf("bad: %s, err: %#v", tn, err) + } + + for k, v := range tc.Expected { + if is.Attributes[k] != v { + t.Fatalf( + "bad: %s\n\n expected: %#v -> %#v\n got: %#v -> %#v\n in: %#v", + tn, k, v, k, is.Attributes[k], is.Attributes) + } + } + } +} + +func TestGoogleProjectMigrateState_empty(t *testing.T) { + var is *terraform.InstanceState + var meta *Config + + // should handle nil + is, err := 
resourceGoogleProjectMigrateState(0, is, meta) + + if err != nil { + t.Fatalf("err: %#v", err) + } + if is != nil { + t.Fatalf("expected nil instancestate, got: %#v", is) + } + + // should handle non-nil but empty + is = &terraform.InstanceState{} + is, err = resourceGoogleProjectMigrateState(0, is, meta) + + if err != nil { + t.Fatalf("err: %#v", err) + } +} diff --git a/provider/terraform/tests/resource_google_project_organization_policy_test.go b/provider/terraform/tests/resource_google_project_organization_policy_test.go new file mode 100644 index 000000000000..12067285c244 --- /dev/null +++ b/provider/terraform/tests/resource_google_project_organization_policy_test.go @@ -0,0 +1,354 @@ +package google + +import ( + "fmt" + "reflect" + "sort" + "strings" + "testing" + + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/cloudresourcemanager/v1" +) + +/* +Tests for `google_project_organization_policy` + +These are *not* run in parallel, as they all use the same project +and I end up with 409 Conflict errors from the API when they are +run in parallel. 
+*/ + +func TestAccProjectOrganizationPolicy_boolean(t *testing.T) { + projectId := getTestProjectFromEnv() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleProjectOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + // Test creation of an enforced boolean policy + Config: testAccProjectOrganizationPolicy_boolean(projectId, true), + Check: testAccCheckGoogleProjectOrganizationBooleanPolicy("bool", true), + }, + { + // Test update from enforced to not + Config: testAccProjectOrganizationPolicy_boolean(projectId, false), + Check: testAccCheckGoogleProjectOrganizationBooleanPolicy("bool", false), + }, + { + Config: " ", + Destroy: true, + }, + { + // Test creation of a not enforced boolean policy + Config: testAccProjectOrganizationPolicy_boolean(projectId, false), + Check: testAccCheckGoogleProjectOrganizationBooleanPolicy("bool", false), + }, + { + // Test update from not enforced to enforced + Config: testAccProjectOrganizationPolicy_boolean(projectId, true), + Check: testAccCheckGoogleProjectOrganizationBooleanPolicy("bool", true), + }, + }, + }) +} + +func TestAccProjectOrganizationPolicy_list_allowAll(t *testing.T) { + projectId := getTestProjectFromEnv() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleProjectOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccProjectOrganizationPolicy_list_allowAll(projectId), + Check: testAccCheckGoogleProjectOrganizationListPolicyAll("list", "ALLOW"), + }, + }, + }) +} + +func TestAccProjectOrganizationPolicy_list_allowSome(t *testing.T) { + project := getTestProjectFromEnv() + canonicalProject := canonicalProjectId(project) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: 
testAccCheckGoogleProjectOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccProjectOrganizationPolicy_list_allowSome(project), + Check: testAccCheckGoogleProjectOrganizationListPolicyAllowedValues("list", []string{canonicalProject}), + }, + }, + }) +} + +func TestAccProjectOrganizationPolicy_list_denySome(t *testing.T) { + projectId := getTestProjectFromEnv() + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleProjectOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccProjectOrganizationPolicy_list_denySome(projectId), + Check: testAccCheckGoogleProjectOrganizationListPolicyDeniedValues("list", DENIED_ORG_POLICIES), + }, + }, + }) +} + +func TestAccProjectOrganizationPolicy_list_update(t *testing.T) { + projectId := getTestProjectFromEnv() + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleProjectOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccProjectOrganizationPolicy_list_allowAll(projectId), + Check: testAccCheckGoogleProjectOrganizationListPolicyAll("list", "ALLOW"), + }, + { + Config: testAccProjectOrganizationPolicy_list_denySome(projectId), + Check: testAccCheckGoogleProjectOrganizationListPolicyDeniedValues("list", DENIED_ORG_POLICIES), + }, + }, + }) +} + +func TestAccProjectOrganizationPolicy_restore_defaultTrue(t *testing.T) { + projectId := getTestProjectFromEnv() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckGoogleProjectOrganizationPolicyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccProjectOrganizationPolicy_restore_defaultTrue(projectId), + Check: getGoogleProjectOrganizationRestoreDefaultTrue("restore", &cloudresourcemanager.RestoreDefault{}), + }, + }, + }) +} + +func 
testAccCheckGoogleProjectOrganizationPolicyDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_project_organization_policy" { + continue + } + + projectId := canonicalProjectId(rs.Primary.Attributes["project"]) + constraint := canonicalOrgPolicyConstraint(rs.Primary.Attributes["constraint"]) + policy, err := config.clientResourceManager.Projects.GetOrgPolicy(projectId, &cloudresourcemanager.GetOrgPolicyRequest{ + Constraint: constraint, + }).Do() + + if err != nil { + return err + } + + if policy.ListPolicy != nil || policy.BooleanPolicy != nil { + return fmt.Errorf("Org policy with constraint '%s' hasn't been cleared", constraint) + } + } + return nil +} + +func testAccCheckGoogleProjectOrganizationBooleanPolicy(n string, enforced bool) resource.TestCheckFunc { + return func(s *terraform.State) error { + policy, err := getGoogleProjectOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + if policy.BooleanPolicy.Enforced != enforced { + return fmt.Errorf("Expected boolean policy enforcement to be '%t', got '%t'", enforced, policy.BooleanPolicy.Enforced) + } + + return nil + } +} + +func testAccCheckGoogleProjectOrganizationListPolicyAll(n, policyType string) resource.TestCheckFunc { + return func(s *terraform.State) error { + policy, err := getGoogleProjectOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + if policy.ListPolicy == nil { + return nil + } + + if len(policy.ListPolicy.AllowedValues) > 0 || len(policy.ListPolicy.DeniedValues) > 0 { + return fmt.Errorf("The `values` field shouldn't be set") + } + + if policy.ListPolicy.AllValues != policyType { + return fmt.Errorf("The list policy should %s all values", policyType) + } + + return nil + } +} + +func testAccCheckGoogleProjectOrganizationListPolicyAllowedValues(n string, values []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + 
policy, err := getGoogleProjectOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + sort.Strings(policy.ListPolicy.AllowedValues) + sort.Strings(values) + if !reflect.DeepEqual(policy.ListPolicy.AllowedValues, values) { + return fmt.Errorf("Expected the list policy to allow '%s', instead allowed '%s'", values, policy.ListPolicy.AllowedValues) + } + + return nil + } +} + +func testAccCheckGoogleProjectOrganizationListPolicyDeniedValues(n string, values []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + policy, err := getGoogleProjectOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + sort.Strings(policy.ListPolicy.DeniedValues) + sort.Strings(values) + if !reflect.DeepEqual(policy.ListPolicy.DeniedValues, values) { + return fmt.Errorf("Expected the list policy to deny '%s', instead denied '%s'", values, policy.ListPolicy.DeniedValues) + } + + return nil + } +} + +func getGoogleProjectOrganizationRestoreDefaultTrue(n string, policyDefault *cloudresourcemanager.RestoreDefault) resource.TestCheckFunc { + return func(s *terraform.State) error { + + policy, err := getGoogleProjectOrganizationPolicyTestResource(s, n) + if err != nil { + return err + } + + if !reflect.DeepEqual(policy.RestoreDefault, policyDefault) { + return fmt.Errorf("Expected the restore default '%s', instead denied, %s", policyDefault, policy.RestoreDefault) + } + + return nil + } +} + +func getGoogleProjectOrganizationPolicyTestResource(s *terraform.State, n string) (*cloudresourcemanager.OrgPolicy, error) { + rn := "google_project_organization_policy." 
+ n + rs, ok := s.RootModule().Resources[rn] + if !ok { + return nil, fmt.Errorf("Not found: %s", rn) + } + + if rs.Primary.ID == "" { + return nil, fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + projectId := canonicalProjectId(rs.Primary.Attributes["project"]) + + return config.clientResourceManager.Projects.GetOrgPolicy(projectId, &cloudresourcemanager.GetOrgPolicyRequest{ + Constraint: rs.Primary.Attributes["constraint"], + }).Do() +} + +func testAccProjectOrganizationPolicy_boolean(pid string, enforced bool) string { + return fmt.Sprintf(` +resource "google_project_organization_policy" "bool" { + project = "%s" + constraint = "constraints/compute.disableSerialPortAccess" + + boolean_policy { + enforced = %t + } +} +`, pid, enforced) +} + +func testAccProjectOrganizationPolicy_list_allowAll(pid string) string { + return fmt.Sprintf(` +resource "google_project_organization_policy" "list" { + project = "%s" + constraint = "constraints/serviceuser.services" + + list_policy { + allow { + all = true + } + } +} +`, pid) +} + +func testAccProjectOrganizationPolicy_list_allowSome(pid string) string { + return fmt.Sprintf(` + +resource "google_project_organization_policy" "list" { + project = "%s" + constraint = "constraints/compute.trustedImageProjects" + + list_policy { + allow { + values = ["projects/%s"] + } + } +} +`, pid, pid) +} + +func testAccProjectOrganizationPolicy_list_denySome(pid string) string { + return fmt.Sprintf(` + +resource "google_project_organization_policy" "list" { + project = "%s" + constraint = "constraints/serviceuser.services" + + list_policy { + deny { + values = [ + "doubleclicksearch.googleapis.com", + "replicapoolupdater.googleapis.com", + ] + } + } +} +`, pid) +} + +func testAccProjectOrganizationPolicy_restore_defaultTrue(pid string) string { + return fmt.Sprintf(` +resource "google_project_organization_policy" "restore" { + project = "%s" + constraint = "constraints/serviceuser.services" + + 
restore_policy { + default = true + } +} +`, pid) +} + +func canonicalProjectId(project string) string { + if strings.HasPrefix(project, "projects/") { + return project + } + return fmt.Sprintf("projects/%s", project) +} diff --git a/provider/terraform/tests/resource_google_project_service_test.go b/provider/terraform/tests/resource_google_project_service_test.go new file mode 100644 index 000000000000..8f4b0e08bf42 --- /dev/null +++ b/provider/terraform/tests/resource_google_project_service_test.go @@ -0,0 +1,189 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +// Test that services can be enabled and disabled on a project +func TestAccProjectService_basic(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + services := []string{"iam.googleapis.com", "cloudresourcemanager.googleapis.com"} + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccProjectService_basic(services, pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectService(services, pid, true), + ), + }, + resource.TestStep{ + ResourceName: "google_project_service.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"disable_on_destroy"}, + }, + resource.TestStep{ + ResourceName: "google_project_service.test2", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"disable_on_destroy"}, + }, + // Use a separate TestStep rather than a CheckDestroy because we need the project to still exist. 
+ resource.TestStep{ + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectService(services, pid, false), + ), + }, + // Create services with disabling turned off. + resource.TestStep{ + Config: testAccProjectService_noDisable(services, pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectService(services, pid, true), + ), + }, + // Check that services are still enabled even after the resources are deleted. + resource.TestStep{ + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectService(services, pid, true), + ), + }, + }, + }) +} + +func TestAccProjectService_handleNotFound(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + service := "iam.googleapis.com" + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccProjectService_handleNotFound(service, pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectService([]string{service}, pid, true), + ), + }, + // Delete the project, implicitly deletes service, expect the plan to want to create the service again + resource.TestStep{ + Config: testAccProjectService_handleNotFoundNoProject(service, pid), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + +func testAccCheckProjectService(services []string, pid string, expectEnabled bool) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + apiServices, err := getApiServices(pid, config, map[string]struct{}{}) + if err != nil { + return fmt.Errorf("Error listing services for project %q: %v", pid, err) + } + + for _, expected := range services { + exists := false + for _, actual := range apiServices { + if expected == actual { + exists = true + } + } + if expectEnabled && 
!exists { + return fmt.Errorf("Expected service %s is not enabled server-side (found %v)", expected, apiServices) + } + if !expectEnabled && exists { + return fmt.Errorf("Expected disabled service %s is enabled server-side", expected) + } + } + + return nil + } +} + +func testAccProjectService_basic(services []string, pid, name, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +resource "google_project_service" "test" { + project = "${google_project.acceptance.project_id}" + service = "%s" +} + +resource "google_project_service" "test2" { + project = "${google_project.acceptance.project_id}" + service = "%s" +} +`, pid, name, org, services[0], services[1]) +} + +func testAccProjectService_noDisable(services []string, pid, name, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +resource "google_project_service" "test" { + project = "${google_project.acceptance.project_id}" + service = "%s" + disable_on_destroy = false +} + +resource "google_project_service" "test2" { + project = "${google_project.acceptance.project_id}" + service = "%s" + disable_on_destroy = false +} +`, pid, name, org, services[0], services[1]) +} + +func testAccProjectService_handleNotFound(service, pid, name, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} + +// by passing through locals, we break the dependency chain +// see terraform-provider-google#1292 +locals { + project_id = "${google_project.acceptance.project_id}" +} + +resource "google_project_service" "test" { + project = "${local.project_id}" + service = "%s" +} +`, pid, name, org, service) +} + +func testAccProjectService_handleNotFoundNoProject(service, pid string) string { + return fmt.Sprintf(` +resource "google_project_service" "test" { + project = "%s" + 
service = "%s" +} +`, pid, service) +} diff --git a/provider/terraform/tests/resource_google_project_services_test.go b/provider/terraform/tests/resource_google_project_services_test.go new file mode 100644 index 000000000000..80983106d03d --- /dev/null +++ b/provider/terraform/tests/resource_google_project_services_test.go @@ -0,0 +1,322 @@ +package google + +import ( + "bytes" + "fmt" + "log" + "reflect" + "sort" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +// Test that services can be enabled and disabled on a project +func TestAccProjectServices_basic(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + services1 := []string{"iam.googleapis.com", "cloudresourcemanager.googleapis.com"} + services2 := []string{"cloudresourcemanager.googleapis.com"} + oobService := "iam.googleapis.com" + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project with some services + resource.TestStep{ + Config: testAccProjectAssociateServicesBasic(services1, pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testProjectServicesMatch(services1, pid), + ), + }, + // Update services to remove one + resource.TestStep{ + Config: testAccProjectAssociateServicesBasic(services2, pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testProjectServicesMatch(services2, pid), + ), + }, + // Add a service out-of-band and ensure it is removed + resource.TestStep{ + PreConfig: func() { + config := testAccProvider.Meta().(*Config) + enableService(oobService, pid, config) + }, + Config: testAccProjectAssociateServicesBasic(services2, pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testProjectServicesMatch(services2, pid), + ), + }, + resource.TestStep{ + ResourceName: 
"google_project_services.acceptance", + ImportState: true, + ImportStateId: pid, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"disable_on_destroy"}, + }, + }, + }) +} + +// Test that services are authoritative when a project has existing +// sevices not represented in config +func TestAccProjectServices_authoritative(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + services := []string{"cloudresourcemanager.googleapis.com"} + oobService := "iam.googleapis.com" + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project with no services + resource.TestStep{ + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectExists("google_project.acceptance", pid), + ), + }, + // Add a service out-of-band, then apply a config that creates a service. + // It should remove the out-of-band service. 
+ resource.TestStep{ + PreConfig: func() { + config := testAccProvider.Meta().(*Config) + enableService(oobService, pid, config) + }, + Config: testAccProjectAssociateServicesBasic(services, pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testProjectServicesMatch(services, pid), + ), + }, + }, + }) +} + +// Test that services are authoritative when a project has existing +// sevices, some which are represented in the config and others +// that are not +func TestAccProjectServices_authoritative2(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + oobServices := []string{"iam.googleapis.com", "cloudresourcemanager.googleapis.com"} + services := []string{"iam.googleapis.com"} + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // Create a new project with no services + resource.TestStep{ + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectExists("google_project.acceptance", pid), + ), + }, + // Add a service out-of-band, then apply a config that creates a service. + // It should remove the out-of-band service. + resource.TestStep{ + PreConfig: func() { + config := testAccProvider.Meta().(*Config) + for _, s := range oobServices { + enableService(s, pid, config) + } + }, + Config: testAccProjectAssociateServicesBasic(services, pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testProjectServicesMatch(services, pid), + ), + }, + }, + }) +} + +// Test that services that can't be enabled on their own (such as dataproc-control.googleapis.com) +// don't end up causing diffs when they are enabled as a side-effect of a different service's +// enablement. 
+func TestAccProjectServices_ignoreUnenablableServices(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + billingId := getTestBillingAccountFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + services := []string{ + "dataproc.googleapis.com", + // The following services are enabled as a side-effect of dataproc's enablement + "storage-component.googleapis.com", + "deploymentmanager.googleapis.com", + "replicapool.googleapis.com", + "replicapoolupdater.googleapis.com", + "resourceviews.googleapis.com", + "compute.googleapis.com", + "container.googleapis.com", + "containerregistry.googleapis.com", + "storage-api.googleapis.com", + "pubsub.googleapis.com", + "oslogin.googleapis.com", + "bigquery-json.googleapis.com", + } + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccProjectAssociateServicesBasic_withBilling(services, pid, pname, org, billingId), + Check: resource.ComposeTestCheckFunc( + testProjectServicesMatch(services, pid), + ), + }, + }, + }) +} + +func TestAccProjectServices_pagination(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + billingId := getTestBillingAccountFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + + // we need at least 50 services (doesn't matter what they are) to exercise the + // pagination handling code. 
+ services := []string{ + "actions.googleapis.com", + "appengine.googleapis.com", + "appengineflex.googleapis.com", + "bigquery-json.googleapis.com", + "bigquerydatatransfer.googleapis.com", + "bigtableadmin.googleapis.com", + "bigtabletableadmin.googleapis.com", + "cloudbuild.googleapis.com", + "clouderrorreporting.googleapis.com", + "cloudfunctions.googleapis.com", + "cloudiot.googleapis.com", + "cloudkms.googleapis.com", + "cloudmonitoring.googleapis.com", + "cloudresourcemanager.googleapis.com", + "cloudtrace.googleapis.com", + "compute.googleapis.com", + "container.googleapis.com", + "containerregistry.googleapis.com", + "dataflow.googleapis.com", + "dataproc.googleapis.com", + "datastore.googleapis.com", + "deploymentmanager.googleapis.com", + "dialogflow.googleapis.com", + "dns.googleapis.com", + "endpoints.googleapis.com", + "firebaserules.googleapis.com", + "firestore.googleapis.com", + "genomics.googleapis.com", + "iam.googleapis.com", + "language.googleapis.com", + "logging.googleapis.com", + "ml.googleapis.com", + "monitoring.googleapis.com", + "oslogin.googleapis.com", + "pubsub.googleapis.com", + "replicapool.googleapis.com", + "replicapoolupdater.googleapis.com", + "resourceviews.googleapis.com", + "runtimeconfig.googleapis.com", + "servicecontrol.googleapis.com", + "servicemanagement.googleapis.com", + "sourcerepo.googleapis.com", + "spanner.googleapis.com", + "speech.googleapis.com", + "sql-component.googleapis.com", + "storage-api.googleapis.com", + "storage-component.googleapis.com", + "storagetransfer.googleapis.com", + "testing.googleapis.com", + "toolresults.googleapis.com", + "translate.googleapis.com", + "videointelligence.googleapis.com", + "vision.googleapis.com", + "zync.googleapis.com", + } + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccProjectAssociateServicesBasic_withBilling(services, pid, pname, org, 
billingId), + Check: resource.ComposeTestCheckFunc( + testProjectServicesMatch(services, pid), + ), + }, + }, + }) +} + +func testAccProjectAssociateServicesBasic(services []string, pid, name, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +} +resource "google_project_services" "acceptance" { + project = "${google_project.acceptance.project_id}" + services = [%s] + disable_on_destroy = true +} +`, pid, name, org, testStringsToString(services)) +} + +func testAccProjectAssociateServicesBasic_withBilling(services []string, pid, name, org, billing string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" + billing_account = "%s" +} +resource "google_project_services" "acceptance" { + project = "${google_project.acceptance.project_id}" + services = [%s] + disable_on_destroy = false +} +`, pid, name, org, billing, testStringsToString(services)) +} + +func testProjectServicesMatch(services []string, pid string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + apiServices, err := getApiServices(pid, config, ignoreProjectServices) + if err != nil { + return fmt.Errorf("Error listing services for project %q: %v", pid, err) + } + + sort.Strings(services) + sort.Strings(apiServices) + if !reflect.DeepEqual(services, apiServices) { + return fmt.Errorf("Services in config (%v) do not exactly match services returned by API (%v)", services, apiServices) + } + + return nil + } +} + +func testStringsToString(s []string) string { + var b bytes.Buffer + for i, v := range s { + b.WriteString(fmt.Sprintf("\"%s\"", v)) + if i < len(s)-1 { + b.WriteString(",") + } + } + r := b.String() + log.Printf("[DEBUG]: Converted list of strings to %s", r) + return b.String() +} diff --git a/provider/terraform/tests/resource_google_project_test.go 
b/provider/terraform/tests/resource_google_project_test.go new file mode 100644 index 000000000000..dad6333d8d42 --- /dev/null +++ b/provider/terraform/tests/resource_google_project_test.go @@ -0,0 +1,576 @@ +package google + +import ( + "fmt" + "os" + "reflect" + "strconv" + "strings" + "testing" + + "github.com/davecgh/go-spew/spew" + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/cloudresourcemanager/v1" +) + +var ( + pname = "Terraform Acceptance Tests" + originalPolicy *cloudresourcemanager.Policy +) + +// Test that a Project resource can be created without an organization +func TestAccProject_createWithoutOrg(t *testing.T) { + t.Parallel() + + creds := multiEnvSearch(credsEnvVars) + if strings.Contains(creds, "iam.gserviceaccount.com") { + t.Skip("Service accounts cannot create projects without a parent. Requires user credentials.") + } + + pid := "terraform-" + acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // This step creates a new project + resource.TestStep{ + Config: testAccProject_createWithoutOrg(pid, pname), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectExists("google_project.acceptance", pid), + ), + }, + }, + }) +} + +// Test that a Project resource can be created and an IAM policy +// associated +func TestAccProject_create(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // This step creates a new project + resource.TestStep{ + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectExists("google_project.acceptance", pid), + ), + 
}, + }, + }) +} + +// Test that a Project resource can be created with an associated +// billing account +func TestAccProject_billing(t *testing.T) { + t.Parallel() + org := getTestOrgFromEnv(t) + skipIfEnvNotSet(t, "GOOGLE_BILLING_ACCOUNT_2") + billingId2 := os.Getenv("GOOGLE_BILLING_ACCOUNT_2") + billingId := getTestBillingAccountFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + // This step creates a new project with a billing account + resource.TestStep{ + Config: testAccProject_createBilling(pid, pname, org, billingId), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectHasBillingAccount("google_project.acceptance", pid, billingId), + ), + }, + // Make sure import supports billing account + resource.TestStep{ + ResourceName: "google_project.acceptance", + ImportState: true, + ImportStateVerify: true, + }, + // Update to a different billing account + resource.TestStep{ + Config: testAccProject_createBilling(pid, pname, org, billingId2), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectHasBillingAccount("google_project.acceptance", pid, billingId2), + ), + }, + // Unlink the billing account + resource.TestStep{ + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectHasBillingAccount("google_project.acceptance", pid, ""), + ), + }, + }, + }) +} + +// Test that a Project resource can be created with labels +func TestAccProject_labels(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccProject_labels(pid, pname, org, map[string]string{"test": "that"}), + Check: resource.ComposeTestCheckFunc( + 
testAccCheckGoogleProjectHasLabels("google_project.acceptance", pid, map[string]string{"test": "that"}), + ), + }, + // Make sure import supports labels + { + ResourceName: "google_project.acceptance", + ImportState: true, + ImportStateVerify: true, + }, + // update project with labels + { + Config: testAccProject_labels(pid, pname, org, map[string]string{"label": "label-value"}), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectExists("google_project.acceptance", pid), + testAccCheckGoogleProjectHasLabels("google_project.acceptance", pid, map[string]string{"label": "label-value"}), + ), + }, + // update project delete labels + { + Config: testAccProject_create(pid, pname, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectExists("google_project.acceptance", pid), + testAccCheckGoogleProjectHasNoLabels("google_project.acceptance", pid), + ), + }, + }, + }) +} + +func TestAccProject_deleteDefaultNetwork(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + billingId := getTestBillingAccountFromEnv(t) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccProject_deleteDefaultNetwork(pid, pname, org, billingId), + }, + }, + }) +} + +func TestAccProject_parentFolder(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := "terraform-" + acctest.RandString(10) + folderDisplayName := "tf-test-" + acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccProject_parentFolder(pid, pname, folderDisplayName, org), + }, + }, + }) +} + +func TestAccProject_appEngineBasic(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := acctest.RandomWithPrefix("tf-test") + resource.Test(t, resource.TestCase{ + PreCheck: func() 
{ testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccProject_appEngineBasic(pid, org), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.name"), + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.url_dispatch_rule.#"), + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.code_bucket"), + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.default_hostname"), + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.default_bucket"), + ), + }, + resource.TestStep{ + ResourceName: "google_project.acceptance", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccProject_appEngineBasicWithBilling(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := acctest.RandomWithPrefix("tf-test") + billingId := getTestBillingAccountFromEnv(t) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccProject_appEngineBasicWithBilling(pid, org, billingId), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.name"), + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.url_dispatch_rule.#"), + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.code_bucket"), + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.default_hostname"), + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.default_bucket"), + ), + }, + resource.TestStep{ + ResourceName: "google_project.acceptance", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccProject_appEngineUpdate(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := 
acctest.RandomWithPrefix("tf-test") + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccProject_appEngineNoApp(pid, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleProjectExists("google_project.acceptance", pid), + ), + }, + { + Config: testAccProject_appEngineBasic(pid, org), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.name"), + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.url_dispatch_rule.#"), + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.code_bucket"), + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.default_hostname"), + resource.TestCheckResourceAttrSet("google_project.acceptance", "app_engine.0.default_bucket"), + ), + }, + resource.TestStep{ + ResourceName: "google_project.acceptance", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccProject_appEngineUpdate(pid, org), + }, + resource.TestStep{ + ResourceName: "google_project.acceptance", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccProject_appEngineFeatureSettings(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + pid := acctest.RandomWithPrefix("tf-test") + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccProject_appEngineFeatureSettings(pid, org), + }, + resource.TestStep{ + ResourceName: "google_project.acceptance", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccProject_appEngineFeatureSettingsUpdate(pid, org), + }, + resource.TestStep{ + ResourceName: "google_project.acceptance", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckGoogleProjectExists(r, pid string) 
resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[r] + if !ok { + return fmt.Errorf("Not found: %s", r) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + if rs.Primary.ID != pid { + return fmt.Errorf("Expected project %q to match ID %q in state", pid, rs.Primary.ID) + } + + return nil + } +} + +func testAccCheckGoogleProjectHasBillingAccount(r, pid, billingId string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[r] + if !ok { + return fmt.Errorf("Not found: %s", r) + } + + // State should match expected + if rs.Primary.Attributes["billing_account"] != billingId { + return fmt.Errorf("Billing ID in state (%s) does not match expected value (%s)", rs.Primary.Attributes["billing_account"], billingId) + } + + // Actual value in API should match state and expected + // Read the billing account + config := testAccProvider.Meta().(*Config) + ba, err := config.clientBilling.Projects.GetBillingInfo(prefixedProject(pid)).Do() + if err != nil { + return fmt.Errorf("Error reading billing account for project %q: %v", prefixedProject(pid), err) + } + if billingId != strings.TrimPrefix(ba.BillingAccountName, "billingAccounts/") { + return fmt.Errorf("Billing ID returned by API (%s) did not match expected value (%s)", ba.BillingAccountName, billingId) + } + return nil + } +} + +func testAccCheckGoogleProjectHasLabels(r, pid string, expected map[string]string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[r] + if !ok { + return fmt.Errorf("Not found: %s", r) + } + + // State should have the same number of labels + if rs.Primary.Attributes["labels.%"] != strconv.Itoa(len(expected)) { + return fmt.Errorf("Expected %d labels, got %s", len(expected), rs.Primary.Attributes["labels.%"]) + } + + // Actual value in API should match state and expected + config := testAccProvider.Meta().(*Config) + 
+ found, err := config.clientResourceManager.Projects.Get(pid).Do() + if err != nil { + return err + } + + actual := found.Labels + if !reflect.DeepEqual(actual, expected) { + // Determine only the different attributes + for k, v := range expected { + if av, ok := actual[k]; ok && v == av { + delete(expected, k) + delete(actual, k) + } + } + + spewConf := spew.NewDefaultConfig() + spewConf.SortKeys = true + return fmt.Errorf( + "Labels not equivalent. Difference is shown below. Top is actual, bottom is expected."+ + "\n\n%s\n\n%s", + spewConf.Sdump(actual), spewConf.Sdump(expected), + ) + } + return nil + } +} + +func testAccCheckGoogleProjectHasNoLabels(r, pid string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[r] + if !ok { + return fmt.Errorf("Not found: %s", r) + } + + // State should have zero labels + if rs.Primary.Attributes["labels.%"] != "0" { + return fmt.Errorf("Expected 0 labels, got %s", rs.Primary.Attributes["labels.%"]) + } + + // Actual value in API should match state and expected + config := testAccProvider.Meta().(*Config) + + found, err := config.clientResourceManager.Projects.Get(pid).Do() + if err != nil { + return err + } + + spewConf := spew.NewDefaultConfig() + spewConf.SortKeys = true + if found.Labels != nil { + return fmt.Errorf("Labels should be empty. 
Actual \n%s", spewConf.Sdump(found.Labels)) + } + return nil + } +} + +func testAccProject_labels(pid, name, org string, labels map[string]string) string { + r := fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" + labels {`, pid, name, org) + + l := "" + for key, value := range labels { + l += fmt.Sprintf("%q = %q\n", key, value) + } + + l += fmt.Sprintf("}\n}") + return r + l +} + +func testAccProject_deleteDefaultNetwork(pid, name, org, billing string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" + billing_account = "%s" # requires billing to enable compute API + auto_create_network = false +}`, pid, name, org, billing) +} + +func testAccProject_parentFolder(pid, projectName, folderName, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + # ensures we can set both org_id and folder_id as long as only one is not empty. 
+ org_id = "" + folder_id = "${google_folder.folder1.id}" +} + +resource "google_folder" "folder1" { + display_name = "%s" + parent = "organizations/%s" +} + +`, pid, projectName, folderName, org) +} + +func testAccProject_appEngineNoApp(pid, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" +}`, pid, pid, org) +} + +func testAccProject_appEngineBasic(pid, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" + + app_engine { + auth_domain = "hashicorptest.com" + location_id = "us-central" + serving_status = "SERVING" + } +}`, pid, pid, org) +} + +func testAccProject_appEngineBasicWithBilling(pid, org, billing string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" + + billing_account = "%s" + + app_engine { + auth_domain = "hashicorptest.com" + location_id = "us-central" + serving_status = "SERVING" + } +}`, pid, pid, org, billing) +} + +func testAccProject_appEngineUpdate(pid, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" + + app_engine { + auth_domain = "tf-test.club" + location_id = "us-central" + serving_status = "USER_DISABLED" + } +}`, pid, pid, org) +} + +func testAccProject_appEngineFeatureSettings(pid, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" + + app_engine { + location_id = "us-central" + + feature_settings { + "split_health_checks" = true + } + } +}`, pid, pid, org) +} + +func testAccProject_appEngineFeatureSettingsUpdate(pid, org string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + project_id = "%s" + name = "%s" + org_id = "%s" + + app_engine { + location_id = "us-central" + + feature_settings { + 
"split_health_checks" = false + } + } +}`, pid, pid, org) +} + +func skipIfEnvNotSet(t *testing.T, envs ...string) { + for _, k := range envs { + if os.Getenv(k) == "" { + t.Skipf("Environment variable %s is not set", k) + } + } +} diff --git a/provider/terraform/tests/resource_google_service_account_iam_test.go b/provider/terraform/tests/resource_google_service_account_iam_test.go new file mode 100644 index 000000000000..1e183d870059 --- /dev/null +++ b/provider/terraform/tests/resource_google_service_account_iam_test.go @@ -0,0 +1,167 @@ +package google + +import ( + "fmt" + "reflect" + "sort" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccServiceAccountIamBinding(t *testing.T) { + t.Parallel() + + account := acctest.RandomWithPrefix("tf-test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccServiceAccountIamBinding_basic(account), + Check: testAccCheckGoogleServiceAccountIam(account, "roles/viewer", []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + ResourceName: "google_service_account_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s %s", getServiceAccountCanonicalId(account), "roles/viewer"), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccServiceAccountIamMember(t *testing.T) { + t.Parallel() + + account := acctest.RandomWithPrefix("tf-test") + identity := fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccServiceAccountIamMember_basic(account), + Check: testAccCheckGoogleServiceAccountIam(account, 
"roles/editor", []string{identity}), + }, + { + ResourceName: "google_service_account_iam_member.foo", + ImportStateId: fmt.Sprintf("%s %s %s", getServiceAccountCanonicalId(account), "roles/editor", identity), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccServiceAccountIamPolicy(t *testing.T) { + t.Parallel() + + account := acctest.RandomWithPrefix("tf-test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccServiceAccountIamPolicy_basic(account), + Check: testAccCheckGoogleServiceAccountIam(account, "roles/owner", []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + ResourceName: "google_service_account_iam_policy.foo", + ImportStateId: getServiceAccountCanonicalId(account), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckGoogleServiceAccountIam(account, role string, members []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + p, err := config.clientIAM.Projects.ServiceAccounts.GetIamPolicy(getServiceAccountCanonicalId(account)).Do() + if err != nil { + return err + } + + for _, binding := range p.Bindings { + if binding.Role == role { + sort.Strings(members) + sort.Strings(binding.Members) + + if reflect.DeepEqual(members, binding.Members) { + return nil + } + + return fmt.Errorf("Binding found but expected members is %v, got %v", members, binding.Members) + } + } + + return fmt.Errorf("No binding for role %q", role) + } +} + +func getServiceAccountCanonicalId(account string) string { + return fmt.Sprintf("projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", getTestProjectFromEnv(), account, getTestProjectFromEnv()) +} + +func testAccServiceAccountIamBinding_basic(account string) string { + return fmt.Sprintf(` +resource 
"google_service_account" "test_account" { + account_id = "%s" + display_name = "Iam Testing Account" +} + +resource "google_service_account_iam_binding" "foo" { + service_account_id = "${google_service_account.test_account.id}" + role = "roles/viewer" + members = ["serviceAccount:${google_service_account.test_account.email}"] +} +`, account) +} + +func testAccServiceAccountIamMember_basic(account string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + account_id = "%s" + display_name = "Iam Testing Account" +} + +resource "google_service_account_iam_member" "foo" { + service_account_id = "${google_service_account.test_account.id}" + role = "roles/editor" + member = "serviceAccount:${google_service_account.test_account.email}" +} +`, account) +} + +func testAccServiceAccountIamPolicy_basic(account string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + account_id = "%s" + display_name = "Iam Testing Account" +} + +data "google_iam_policy" "foo" { + binding { + role = "roles/owner" + + members = ["serviceAccount:${google_service_account.test_account.email}"] + } +} + +resource "google_service_account_iam_policy" "foo" { + service_account_id = "${google_service_account.test_account.id}" + policy_data = "${data.google_iam_policy.foo.policy_data}" +} +`, account) +} diff --git a/provider/terraform/tests/resource_google_service_account_key_test.go b/provider/terraform/tests/resource_google_service_account_key_test.go new file mode 100644 index 000000000000..66d1c77b3f93 --- /dev/null +++ b/provider/terraform/tests/resource_google_service_account_key_test.go @@ -0,0 +1,175 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +// Test that a service account key can be created and destroyed +func TestAccServiceAccountKey_basic(t *testing.T) { + 
t.Parallel() + + resourceName := "google_service_account_key.acceptance" + accountID := "a" + acctest.RandString(10) + displayName := "Terraform Test" + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccServiceAccountKey(accountID, displayName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleServiceAccountKeyExists(resourceName), + resource.TestCheckResourceAttrSet(resourceName, "public_key"), + resource.TestCheckResourceAttrSet(resourceName, "valid_after"), + resource.TestCheckResourceAttrSet(resourceName, "valid_before"), + resource.TestCheckResourceAttrSet(resourceName, "private_key"), + ), + }, + }, + }) +} + +func TestAccServiceAccountKey_fromEmail(t *testing.T) { + t.Parallel() + + resourceName := "google_service_account_key.acceptance" + accountID := "a" + acctest.RandString(10) + displayName := "Terraform Test" + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccServiceAccountKey_fromEmail(accountID, displayName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleServiceAccountKeyExists(resourceName), + resource.TestCheckResourceAttrSet(resourceName, "public_key"), + resource.TestCheckResourceAttrSet(resourceName, "valid_after"), + resource.TestCheckResourceAttrSet(resourceName, "valid_before"), + resource.TestCheckResourceAttrSet(resourceName, "private_key"), + ), + }, + }, + }) +} + +func TestAccServiceAccountKey_pgp(t *testing.T) { + t.Parallel() + resourceName := "google_service_account_key.acceptance" + accountID := "a" + acctest.RandString(10) + displayName := "Terraform Test" + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: 
testAccServiceAccountKey_pgp(accountID, displayName, testKeyPairPubKey1), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleServiceAccountKeyExists(resourceName), + resource.TestCheckResourceAttrSet(resourceName, "public_key"), + resource.TestCheckResourceAttrSet(resourceName, "private_key_encrypted"), + resource.TestCheckResourceAttrSet(resourceName, "private_key_fingerprint"), + ), + }, + }, + }) +} + +func testAccCheckGoogleServiceAccountKeyExists(r string) resource.TestCheckFunc { + return func(s *terraform.State) error { + + rs, ok := s.RootModule().Resources[r] + if !ok { + return fmt.Errorf("Not found: %s", r) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + config := testAccProvider.Meta().(*Config) + + _, err := config.clientIAM.Projects.ServiceAccounts.Keys.Get(rs.Primary.ID).Do() + if err != nil { + return err + } + + return nil + } +} + +func testAccServiceAccountKey(account, name string) string { + return fmt.Sprintf(` +resource "google_service_account" "acceptance" { + account_id = "%s" + display_name = "%s" +} + +resource "google_service_account_key" "acceptance" { + service_account_id = "${google_service_account.acceptance.name}" + public_key_type = "TYPE_X509_PEM_FILE" +} +`, account, name) +} + +func testAccServiceAccountKey_fromEmail(account, name string) string { + return fmt.Sprintf(` +resource "google_service_account" "acceptance" { + account_id = "%s" + display_name = "%s" +} + +resource "google_service_account_key" "acceptance" { + service_account_id = "${google_service_account.acceptance.email}" + public_key_type = "TYPE_X509_PEM_FILE" +} +`, account, name) +} + +func testAccServiceAccountKey_pgp(account, name string, key string) string { + return fmt.Sprintf(` +resource "google_service_account" "acceptance" { + account_id = "%s" + display_name = "%s" +} + +resource "google_service_account_key" "acceptance" { + service_account_id = "${google_service_account.acceptance.name}" + public_key_type = 
"TYPE_X509_PEM_FILE" + pgp_key = <=ERROR" +} +`, exclusionName, billingAccount, description, getTestProjectFromEnv()) +} diff --git a/provider/terraform/tests/resource_logging_billing_account_sink_test.go b/provider/terraform/tests/resource_logging_billing_account_sink_test.go new file mode 100644 index 000000000000..4ce2e7898c61 --- /dev/null +++ b/provider/terraform/tests/resource_logging_billing_account_sink_test.go @@ -0,0 +1,223 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/logging/v2" +) + +func TestAccLoggingBillingAccountSink_basic(t *testing.T) { + t.Parallel() + + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + billingAccount := getTestBillingAccountFromEnv(t) + + var sink logging.LogSink + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingBillingAccountSinkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingBillingAccountSink_basic(sinkName, bucketName, billingAccount), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingBillingAccountSinkExists("google_logging_billing_account_sink.basic", &sink), + testAccCheckLoggingBillingAccountSink(&sink, "google_logging_billing_account_sink.basic"), + ), + }, { + ResourceName: "google_logging_billing_account_sink.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccLoggingBillingAccountSink_update(t *testing.T) { + t.Parallel() + + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + updatedBucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + billingAccount := getTestBillingAccountFromEnv(t) + + var sinkBefore, sinkAfter logging.LogSink + + 
resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingBillingAccountSinkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingBillingAccountSink_update(sinkName, bucketName, billingAccount), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingBillingAccountSinkExists("google_logging_billing_account_sink.update", &sinkBefore), + testAccCheckLoggingBillingAccountSink(&sinkBefore, "google_logging_billing_account_sink.update"), + ), + }, { + Config: testAccLoggingBillingAccountSink_update(sinkName, updatedBucketName, billingAccount), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingBillingAccountSinkExists("google_logging_billing_account_sink.update", &sinkAfter), + testAccCheckLoggingBillingAccountSink(&sinkAfter, "google_logging_billing_account_sink.update"), + ), + }, { + ResourceName: "google_logging_billing_account_sink.update", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) + + // Destination should have changed, but WriterIdentity should be the same + if sinkBefore.Destination == sinkAfter.Destination { + t.Errorf("Expected Destination to change, but it didn't: Destination = %#v", sinkBefore.Destination) + } + if sinkBefore.WriterIdentity != sinkAfter.WriterIdentity { + t.Errorf("Expected WriterIdentity to be the same, but it differs: before = %#v, after = %#v", + sinkBefore.WriterIdentity, sinkAfter.WriterIdentity) + } +} + +func TestAccLoggingBillingAccountSink_heredoc(t *testing.T) { + t.Parallel() + + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + billingAccount := getTestBillingAccountFromEnv(t) + + var sink logging.LogSink + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingBillingAccountSinkDestroy, + Steps: []resource.TestStep{ + { + Config: 
testAccLoggingBillingAccountSink_heredoc(sinkName, bucketName, billingAccount), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingBillingAccountSinkExists("google_logging_billing_account_sink.heredoc", &sink), + testAccCheckLoggingBillingAccountSink(&sink, "google_logging_billing_account_sink.heredoc"), + ), + }, { + ResourceName: "google_logging_billing_account_sink.heredoc", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckLoggingBillingAccountSinkDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_logging_billing_account_sink" { + continue + } + + attributes := rs.Primary.Attributes + + _, err := config.clientLogging.BillingAccounts.Sinks.Get(attributes["id"]).Do() + if err == nil { + return fmt.Errorf("billing sink still exists") + } + } + + return nil +} + +func testAccCheckLoggingBillingAccountSinkExists(n string, sink *logging.LogSink) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := getResourceAttributes(n, s) + if err != nil { + return err + } + config := testAccProvider.Meta().(*Config) + + si, err := config.clientLogging.BillingAccounts.Sinks.Get(attributes["id"]).Do() + if err != nil { + return err + } + *sink = *si + + return nil + } +} + +func testAccCheckLoggingBillingAccountSink(sink *logging.LogSink, n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := getResourceAttributes(n, s) + if err != nil { + return err + } + + if sink.Destination != attributes["destination"] { + return fmt.Errorf("mismatch on destination: api has %s but client has %s", sink.Destination, attributes["destination"]) + } + + if sink.Filter != attributes["filter"] { + return fmt.Errorf("mismatch on filter: api has %s but client has %s", sink.Filter, attributes["filter"]) + } + + if sink.WriterIdentity != attributes["writer_identity"] { + return 
fmt.Errorf("mismatch on writer_identity: api has %s but client has %s", sink.WriterIdentity, attributes["writer_identity"]) + } + + return nil + } +} + +func testAccLoggingBillingAccountSink_basic(name, bucketName, billingAccount string) string { + return fmt.Sprintf(` +resource "google_logging_billing_account_sink" "basic" { + name = "%s" + billing_account = "%s" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" +} + +resource "google_storage_bucket" "log-bucket" { + name = "%s" +}`, name, billingAccount, getTestProjectFromEnv(), bucketName) +} + +func testAccLoggingBillingAccountSink_update(name, bucketName, billingAccount string) string { + return fmt.Sprintf(` +resource "google_logging_billing_account_sink" "update" { + name = "%s" + billing_account = "%s" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" +} + +resource "google_storage_bucket" "log-bucket" { + name = "%s" +}`, name, billingAccount, getTestProjectFromEnv(), bucketName) +} + +func testAccLoggingBillingAccountSink_heredoc(name, bucketName, billingAccount string) string { + return fmt.Sprintf(` +resource "google_logging_billing_account_sink" "heredoc" { + name = "%s" + billing_account = "%s" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + filter = <=ERROR + + + + EOS +} + +resource "google_storage_bucket" "log-bucket" { + name = "%s" +}`, name, billingAccount, getTestProjectFromEnv(), bucketName) +} diff --git a/provider/terraform/tests/resource_logging_folder_exclusion_test.go b/provider/terraform/tests/resource_logging_folder_exclusion_test.go new file mode 100644 index 000000000000..4517876bf25d --- /dev/null +++ b/provider/terraform/tests/resource_logging_folder_exclusion_test.go @@ -0,0 +1,240 @@ 
+package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/logging/v2" +) + +func TestAccLoggingFolderExclusion_basic(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + exclusionName := "tf-test-exclusion-" + acctest.RandString(10) + folderName := "tf-test-folder-" + acctest.RandString(10) + description := "Description " + acctest.RandString(10) + + var exclusion logging.LogExclusion + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingFolderExclusionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingFolderExclusion_basic(exclusionName, description, folderName, "organizations/"+org), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingFolderExclusionExists("google_logging_folder_exclusion.basic", &exclusion), + testAccCheckLoggingFolderExclusion(&exclusion, "google_logging_folder_exclusion.basic"), + ), + }, + { + ResourceName: "google_logging_folder_exclusion.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccLoggingFolderExclusion_folderAcceptsFullFolderPath(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + exclusionName := "tf-test-exclusion-" + acctest.RandString(10) + folderName := "tf-test-folder-" + acctest.RandString(10) + description := "Description " + acctest.RandString(10) + + var exclusion logging.LogExclusion + + checkFn := func(s []*terraform.InstanceState) error { + loggingExclusionId, err := parseLoggingExclusionId(s[0].ID) + if err != nil { + return err + } + + folderAttribute := s[0].Attributes["folder"] + if loggingExclusionId.resourceId != folderAttribute { + return fmt.Errorf("imported folder id does not match: actual = %#v expected = %#v", folderAttribute, loggingExclusionId.resourceId) + } + + 
return nil + } + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingFolderExclusionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingFolderExclusion_withFullFolderPath(exclusionName, description, folderName, "organizations/"+org), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingFolderExclusionExists("google_logging_folder_exclusion.full-folder", &exclusion), + testAccCheckLoggingFolderExclusion(&exclusion, "google_logging_folder_exclusion.full-folder"), + ), + }, + { + ResourceName: "google_logging_folder_exclusion.full-folder", + ImportState: true, + ImportStateVerify: true, + // We support both notations: folder/[FOLDER_ID] and plain [FOLDER_ID] however the + // importer will always use the plain [FOLDER_ID] notation which will differ from + // the schema if the schema has used the prefixed notation. We have to check this in + // a checkFn instead. + ImportStateVerifyIgnore: []string{"folder"}, + ImportStateCheck: checkFn, + }, + }, + }) +} + +func TestAccLoggingFolderExclusion_update(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + exclusionName := "tf-test-exclusion-" + acctest.RandString(10) + folderName := "tf-test-folder-" + acctest.RandString(10) + parent := "organizations/" + org + descriptionBefore := "Basic Folder Logging Exclusion" + acctest.RandString(10) + descriptionAfter := "Updated Basic Folder Logging Exclusion" + acctest.RandString(10) + + var exclusionBefore, exclusionAfter logging.LogExclusion + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingFolderExclusionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingFolderExclusion_basic(exclusionName, descriptionBefore, folderName, parent), + Check: resource.ComposeTestCheckFunc( + 
testAccCheckLoggingFolderExclusionExists("google_logging_folder_exclusion.basic", &exclusionBefore), + testAccCheckLoggingFolderExclusion(&exclusionBefore, "google_logging_folder_exclusion.basic"), + ), + }, + { + Config: testAccLoggingFolderExclusion_basic(exclusionName, descriptionAfter, folderName, parent), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingFolderExclusionExists("google_logging_folder_exclusion.basic", &exclusionAfter), + testAccCheckLoggingFolderExclusion(&exclusionAfter, "google_logging_folder_exclusion.basic"), + ), + }, + { + ResourceName: "google_logging_folder_exclusion.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) + + // Description should have changed, but Filter and Disabled should be the same + if exclusionBefore.Description == exclusionAfter.Description { + t.Errorf("Expected Description to change, but it didn't: Description = %#v", exclusionBefore.Description) + } + if exclusionBefore.Filter != exclusionAfter.Filter { + t.Errorf("Expected Filter to be the same, but it differs: before = %#v, after = %#v", + exclusionBefore.Filter, exclusionAfter.Filter) + } + if exclusionBefore.Disabled != exclusionAfter.Disabled { + t.Errorf("Expected Disabled to be the same, but it differs: before = %#v, after = %#v", + exclusionBefore.Disabled, exclusionAfter.Disabled) + } +} + +func testAccCheckLoggingFolderExclusionDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_logging_folder_exclusion" { + continue + } + + attributes := rs.Primary.Attributes + + _, err := config.clientLogging.Folders.Exclusions.Get(attributes["id"]).Do() + if err == nil { + return fmt.Errorf("folder exclusion still exists") + } + } + + return nil +} + +func testAccCheckLoggingFolderExclusionExists(n string, exclusion *logging.LogExclusion) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := 
getResourceAttributes(n, s) + if err != nil { + return err + } + config := testAccProvider.Meta().(*Config) + + si, err := config.clientLogging.Folders.Exclusions.Get(attributes["id"]).Do() + if err != nil { + return err + } + *exclusion = *si + + return nil + } +} + +func testAccCheckLoggingFolderExclusion(exclusion *logging.LogExclusion, n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := getResourceAttributes(n, s) + if err != nil { + return err + } + + if exclusion.Description != attributes["description"] { + return fmt.Errorf("mismatch on description: api has %s but client has %s", exclusion.Description, attributes["description"]) + } + + if exclusion.Filter != attributes["filter"] { + return fmt.Errorf("mismatch on filter: api has %s but client has %s", exclusion.Filter, attributes["filter"]) + } + + disabledAttribute, err := toBool(attributes["disabled"]) + if err != nil { + return err + } + if exclusion.Disabled != disabledAttribute { + return fmt.Errorf("mismatch on disabled: api has %t but client has %t", exclusion.Disabled, disabledAttribute) + } + + return nil + } +} + +func testAccLoggingFolderExclusion_basic(exclusionName, description, folderName, folderParent string) string { + return fmt.Sprintf(` +resource "google_logging_folder_exclusion" "basic" { + name = "%s" + folder = "${element(split("/", google_folder.my-folder.name), 1)}" + description = "%s" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" +} + +resource "google_folder" "my-folder" { + display_name = "%s" + parent = "%s" +}`, exclusionName, description, getTestProjectFromEnv(), folderName, folderParent) +} + +func testAccLoggingFolderExclusion_withFullFolderPath(exclusionName, description, folderName, folderParent string) string { + return fmt.Sprintf(` +resource "google_logging_folder_exclusion" "full-folder" { + name = "%s" + folder = "${google_folder.my-folder.name}" + description = "%s" + 
filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" +} + +resource "google_folder" "my-folder" { + display_name = "%s" + parent = "%s" +}`, exclusionName, description, getTestProjectFromEnv(), folderName, folderParent) +} diff --git a/provider/terraform/tests/resource_logging_folder_sink_test.go b/provider/terraform/tests/resource_logging_folder_sink_test.go new file mode 100644 index 000000000000..490749ac3137 --- /dev/null +++ b/provider/terraform/tests/resource_logging_folder_sink_test.go @@ -0,0 +1,287 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/logging/v2" + "strconv" +) + +func TestAccLoggingFolderSink_basic(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + folderName := "tf-test-folder-" + acctest.RandString(10) + + var sink logging.LogSink + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingFolderSinkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingFolderSink_basic(sinkName, bucketName, folderName, "organizations/"+org), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingFolderSinkExists("google_logging_folder_sink.basic", &sink), + testAccCheckLoggingFolderSink(&sink, "google_logging_folder_sink.basic"), + ), + }, { + ResourceName: "google_logging_folder_sink.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccLoggingFolderSink_folderAcceptsFullFolderPath(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + 
folderName := "tf-test-folder-" + acctest.RandString(10) + + var sink logging.LogSink + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingFolderSinkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingFolderSink_withFullFolderPath(sinkName, bucketName, folderName, "organizations/"+org), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingFolderSinkExists("google_logging_folder_sink.basic", &sink), + testAccCheckLoggingFolderSink(&sink, "google_logging_folder_sink.basic"), + ), + }, { + ResourceName: "google_logging_folder_sink.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccLoggingFolderSink_update(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + updatedBucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + folderName := "tf-test-folder-" + acctest.RandString(10) + parent := "organizations/" + org + + var sinkBefore, sinkAfter logging.LogSink + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingFolderSinkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingFolderSink_basic(sinkName, bucketName, folderName, parent), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingFolderSinkExists("google_logging_folder_sink.basic", &sinkBefore), + testAccCheckLoggingFolderSink(&sinkBefore, "google_logging_folder_sink.basic"), + ), + }, { + Config: testAccLoggingFolderSink_basic(sinkName, updatedBucketName, folderName, parent), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingFolderSinkExists("google_logging_folder_sink.basic", &sinkAfter), + testAccCheckLoggingFolderSink(&sinkAfter, "google_logging_folder_sink.basic"), + ), + }, { + ResourceName: 
"google_logging_folder_sink.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) + + // Destination should have changed, but WriterIdentity should be the same + if sinkBefore.Destination == sinkAfter.Destination { + t.Errorf("Expected Destination to change, but it didn't: Destination = %#v", sinkBefore.Destination) + } + if sinkBefore.WriterIdentity != sinkAfter.WriterIdentity { + t.Errorf("Expected WriterIdentity to be the same, but it differs: before = %#v, after = %#v", + sinkBefore.WriterIdentity, sinkAfter.WriterIdentity) + } +} + +func TestAccLoggingFolderSink_heredoc(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + folderName := "tf-test-folder-" + acctest.RandString(10) + + var sink logging.LogSink + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingFolderSinkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingFolderSink_heredoc(sinkName, bucketName, folderName, "organizations/"+org), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingFolderSinkExists("google_logging_folder_sink.heredoc", &sink), + testAccCheckLoggingFolderSink(&sink, "google_logging_folder_sink.heredoc"), + ), + }, { + ResourceName: "google_logging_folder_sink.heredoc", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckLoggingFolderSinkDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_logging_folder_sink" { + continue + } + + attributes := rs.Primary.Attributes + + _, err := config.clientLogging.Folders.Sinks.Get(attributes["id"]).Do() + if err == nil { + return fmt.Errorf("folder sink still exists") + } + } + + return nil +} + +func testAccCheckLoggingFolderSinkExists(n string, 
sink *logging.LogSink) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := getResourceAttributes(n, s) + if err != nil { + return err + } + config := testAccProvider.Meta().(*Config) + + si, err := config.clientLogging.Folders.Sinks.Get(attributes["id"]).Do() + if err != nil { + return err + } + *sink = *si + + return nil + } +} + +func testAccCheckLoggingFolderSink(sink *logging.LogSink, n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := getResourceAttributes(n, s) + if err != nil { + return err + } + + if sink.Destination != attributes["destination"] { + return fmt.Errorf("mismatch on destination: api has %s but client has %s", sink.Destination, attributes["destination"]) + } + + if sink.Filter != attributes["filter"] { + return fmt.Errorf("mismatch on filter: api has %s but client has %s", sink.Filter, attributes["filter"]) + } + + if sink.WriterIdentity != attributes["writer_identity"] { + return fmt.Errorf("mismatch on writer_identity: api has %s but client has %s", sink.WriterIdentity, attributes["writer_identity"]) + } + + includeChildren := false + if attributes["include_children"] != "" { + includeChildren, err = strconv.ParseBool(attributes["include_children"]) + if err != nil { + return err + } + } + if sink.IncludeChildren != includeChildren { + return fmt.Errorf("mismatch on include_children: api has %v but client has %v", sink.IncludeChildren, includeChildren) + } + + return nil + } +} + +func testAccLoggingFolderSink_basic(sinkName, bucketName, folderName, folderParent string) string { + return fmt.Sprintf(` +resource "google_logging_folder_sink" "basic" { + name = "%s" + folder = "${element(split("/", google_folder.my-folder.name), 1)}" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" + include_children = true +} + +resource 
"google_storage_bucket" "log-bucket" { + name = "%s" +} + +resource "google_folder" "my-folder" { + display_name = "%s" + parent = "%s" +}`, sinkName, getTestProjectFromEnv(), bucketName, folderName, folderParent) +} + +func testAccLoggingFolderSink_withFullFolderPath(sinkName, bucketName, folderName, folderParent string) string { + return fmt.Sprintf(` +resource "google_logging_folder_sink" "basic" { + name = "%s" + folder = "${google_folder.my-folder.name}" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" + include_children = false +} + +resource "google_storage_bucket" "log-bucket" { + name = "%s" +} + +resource "google_folder" "my-folder" { + display_name = "%s" + parent = "%s" +}`, sinkName, getTestProjectFromEnv(), bucketName, folderName, folderParent) +} + +func testAccLoggingFolderSink_heredoc(sinkName, bucketName, folderName, folderParent string) string { + return fmt.Sprintf(` +resource "google_logging_folder_sink" "heredoc" { + name = "%s" + folder = "${element(split("/", google_folder.my-folder.name), 1)}" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + filter = <=ERROR + + + + EOS + include_children = true +} + +resource "google_storage_bucket" "log-bucket" { + name = "%s" +} + +resource "google_folder" "my-folder" { + display_name = "%s" + parent = "%s" +}`, sinkName, getTestProjectFromEnv(), bucketName, folderName, folderParent) +} diff --git a/provider/terraform/tests/resource_logging_organization_exclusion_test.go b/provider/terraform/tests/resource_logging_organization_exclusion_test.go new file mode 100644 index 000000000000..ff3e168957c7 --- /dev/null +++ b/provider/terraform/tests/resource_logging_organization_exclusion_test.go @@ -0,0 +1,167 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + 
"github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/logging/v2" +) + +func TestAccLoggingOrganizationExclusion_basic(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + exclusionName := "tf-test-exclusion-" + acctest.RandString(10) + description := "Description " + acctest.RandString(10) + + var exclusion logging.LogExclusion + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingOrganizationExclusionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingOrganizationExclusion_basic(exclusionName, description, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingOrganizationExclusionExists("google_logging_organization_exclusion.basic", &exclusion), + testAccCheckLoggingOrganizationExclusion(&exclusion, "google_logging_organization_exclusion.basic"), + ), + }, + { + ResourceName: "google_logging_organization_exclusion.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccLoggingOrganizationExclusion_update(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + exclusionName := "tf-test-exclusion-" + acctest.RandString(10) + descriptionBefore := "Basic Organization Logging Exclusion" + acctest.RandString(10) + descriptionAfter := "Updated Basic Organization Logging Exclusion" + acctest.RandString(10) + + var exclusionBefore, exclusionAfter logging.LogExclusion + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingOrganizationExclusionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingOrganizationExclusion_basic(exclusionName, descriptionBefore, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingOrganizationExclusionExists("google_logging_organization_exclusion.basic", &exclusionBefore), + 
testAccCheckLoggingOrganizationExclusion(&exclusionBefore, "google_logging_organization_exclusion.basic"), + ), + }, + { + Config: testAccLoggingOrganizationExclusion_basic(exclusionName, descriptionAfter, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingOrganizationExclusionExists("google_logging_organization_exclusion.basic", &exclusionAfter), + testAccCheckLoggingOrganizationExclusion(&exclusionAfter, "google_logging_organization_exclusion.basic"), + ), + }, + { + ResourceName: "google_logging_organization_exclusion.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) + + // Description should have changed, but Filter and Disabled should be the same + if exclusionBefore.Description == exclusionAfter.Description { + t.Errorf("Expected Description to change, but it didn't: Description = %#v", exclusionBefore.Description) + } + if exclusionBefore.Filter != exclusionAfter.Filter { + t.Errorf("Expected Filter to be the same, but it differs: before = %#v, after = %#v", + exclusionBefore.Filter, exclusionAfter.Filter) + } + if exclusionBefore.Disabled != exclusionAfter.Disabled { + t.Errorf("Expected Disabled to be the same, but it differs: before = %#v, after = %#v", + exclusionBefore.Disabled, exclusionAfter.Disabled) + } +} + +func testAccCheckLoggingOrganizationExclusionDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_logging_organization_exclusion" { + continue + } + + attributes := rs.Primary.Attributes + + _, err := config.clientLogging.Organizations.Exclusions.Get(attributes["id"]).Do() + if err == nil { + return fmt.Errorf("organization exclusion still exists") + } + } + + return nil +} + +func testAccCheckLoggingOrganizationExclusionExists(n string, exclusion *logging.LogExclusion) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := getResourceAttributes(n, s) + if err != nil { + 
return err + } + config := testAccProvider.Meta().(*Config) + + si, err := config.clientLogging.Organizations.Exclusions.Get(attributes["id"]).Do() + if err != nil { + return err + } + *exclusion = *si + + return nil + } +} + +func testAccCheckLoggingOrganizationExclusion(exclusion *logging.LogExclusion, n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := getResourceAttributes(n, s) + if err != nil { + return err + } + + if exclusion.Description != attributes["description"] { + return fmt.Errorf("mismatch on description: api has %s but client has %s", exclusion.Description, attributes["description"]) + } + + if exclusion.Filter != attributes["filter"] { + return fmt.Errorf("mismatch on filter: api has %s but client has %s", exclusion.Filter, attributes["filter"]) + } + + disabledAttribute, err := toBool(attributes["disabled"]) + if err != nil { + return err + } + if exclusion.Disabled != disabledAttribute { + return fmt.Errorf("mismatch on disabled: api has %t but client has %t", exclusion.Disabled, disabledAttribute) + } + + return nil + } +} + +func testAccLoggingOrganizationExclusion_basic(exclusionName, description, orgId string) string { + return fmt.Sprintf(` +resource "google_logging_organization_exclusion" "basic" { + name = "%s" + org_id = "%s" + description = "%s" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" +} +`, exclusionName, orgId, description, getTestProjectFromEnv()) +} diff --git a/provider/terraform/tests/resource_logging_organization_sink_test.go b/provider/terraform/tests/resource_logging_organization_sink_test.go new file mode 100644 index 000000000000..a5149cecadc2 --- /dev/null +++ b/provider/terraform/tests/resource_logging_organization_sink_test.go @@ -0,0 +1,239 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + 
"github.com/hashicorp/terraform/terraform" + "google.golang.org/api/logging/v2" + "strconv" +) + +func TestAccLoggingOrganizationSink_basic(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + + var sink logging.LogSink + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingOrganizationSinkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingOrganizationSink_basic(sinkName, bucketName, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingOrganizationSinkExists("google_logging_organization_sink.basic", &sink), + testAccCheckLoggingOrganizationSink(&sink, "google_logging_organization_sink.basic"), + ), + }, { + ResourceName: "google_logging_organization_sink.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccLoggingOrganizationSink_update(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + updatedBucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + + var sinkBefore, sinkAfter logging.LogSink + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingOrganizationSinkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingOrganizationSink_update(sinkName, bucketName, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingOrganizationSinkExists("google_logging_organization_sink.update", &sinkBefore), + testAccCheckLoggingOrganizationSink(&sinkBefore, "google_logging_organization_sink.update"), + ), + }, { + Config: testAccLoggingOrganizationSink_update(sinkName, updatedBucketName, org), + Check: resource.ComposeTestCheckFunc( + 
testAccCheckLoggingOrganizationSinkExists("google_logging_organization_sink.update", &sinkAfter), + testAccCheckLoggingOrganizationSink(&sinkAfter, "google_logging_organization_sink.update"), + ), + }, { + ResourceName: "google_logging_organization_sink.update", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) + + // Destination should have changed, but WriterIdentity should be the same + if sinkBefore.Destination == sinkAfter.Destination { + t.Errorf("Expected Destination to change, but it didn't: Destination = %#v", sinkBefore.Destination) + } + if sinkBefore.WriterIdentity != sinkAfter.WriterIdentity { + t.Errorf("Expected WriterIdentity to be the same, but it differs: before = %#v, after = %#v", + sinkBefore.WriterIdentity, sinkAfter.WriterIdentity) + } +} + +func TestAccLoggingOrganizationSink_heredoc(t *testing.T) { + t.Parallel() + + org := getTestOrgFromEnv(t) + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + + var sink logging.LogSink + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingOrganizationSinkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingOrganizationSink_heredoc(sinkName, bucketName, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingOrganizationSinkExists("google_logging_organization_sink.heredoc", &sink), + testAccCheckLoggingOrganizationSink(&sink, "google_logging_organization_sink.heredoc"), + ), + }, { + ResourceName: "google_logging_organization_sink.heredoc", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckLoggingOrganizationSinkDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_logging_organization_sink" { + continue + } + + attributes := rs.Primary.Attributes + + _, err := 
config.clientLogging.Organizations.Sinks.Get(attributes["id"]).Do() + if err == nil { + return fmt.Errorf("organization sink still exists") + } + } + + return nil +} + +func testAccCheckLoggingOrganizationSinkExists(n string, sink *logging.LogSink) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := getResourceAttributes(n, s) + if err != nil { + return err + } + config := testAccProvider.Meta().(*Config) + + si, err := config.clientLogging.Organizations.Sinks.Get(attributes["id"]).Do() + if err != nil { + return err + } + *sink = *si + + return nil + } +} + +func testAccCheckLoggingOrganizationSink(sink *logging.LogSink, n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := getResourceAttributes(n, s) + if err != nil { + return err + } + + if sink.Destination != attributes["destination"] { + return fmt.Errorf("mismatch on destination: api has %s but client has %s", sink.Destination, attributes["destination"]) + } + + if sink.Filter != attributes["filter"] { + return fmt.Errorf("mismatch on filter: api has %s but client has %s", sink.Filter, attributes["filter"]) + } + + if sink.WriterIdentity != attributes["writer_identity"] { + return fmt.Errorf("mismatch on writer_identity: api has %s but client has %s", sink.WriterIdentity, attributes["writer_identity"]) + } + + includeChildren := false + if attributes["include_children"] != "" { + includeChildren, err = strconv.ParseBool(attributes["include_children"]) + if err != nil { + return err + } + } + if sink.IncludeChildren != includeChildren { + return fmt.Errorf("mismatch on include_children: api has %v but client has %v", sink.IncludeChildren, includeChildren) + } + + return nil + } +} + +func testAccLoggingOrganizationSink_basic(sinkName, bucketName, orgId string) string { + return fmt.Sprintf(` +resource "google_logging_organization_sink" "basic" { + name = "%s" + org_id = "%s" + destination = 
"storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" + include_children = true +} + +resource "google_storage_bucket" "log-bucket" { + name = "%s" +}`, sinkName, orgId, getTestProjectFromEnv(), bucketName) +} + +func testAccLoggingOrganizationSink_update(sinkName, bucketName, orgId string) string { + return fmt.Sprintf(` +resource "google_logging_organization_sink" "update" { + name = "%s" + org_id = "%s" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + include_children = false +} + +resource "google_storage_bucket" "log-bucket" { + name = "%s" +}`, sinkName, orgId, getTestProjectFromEnv(), bucketName) +} + +func testAccLoggingOrganizationSink_heredoc(sinkName, bucketName, orgId string) string { + return fmt.Sprintf(` +resource "google_logging_organization_sink" "heredoc" { + name = "%s" + org_id = "%s" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + filter = <=ERROR + + + + EOS + include_children = true +} + +resource "google_storage_bucket" "log-bucket" { + name = "%s" +}`, sinkName, orgId, getTestProjectFromEnv(), bucketName) +} diff --git a/provider/terraform/tests/resource_logging_project_exclusion_test.go b/provider/terraform/tests/resource_logging_project_exclusion_test.go new file mode 100644 index 000000000000..c53521d71718 --- /dev/null +++ b/provider/terraform/tests/resource_logging_project_exclusion_test.go @@ -0,0 +1,225 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/logging/v2" +) + +func 
TestAccLoggingProjectExclusion_basic(t *testing.T) { + t.Parallel() + + exclusionName := "tf-test-exclusion-" + acctest.RandString(10) + + var exclusion logging.LogExclusion + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingProjectExclusionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingProjectExclusion_basic(exclusionName), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingProjectExclusionExists("google_logging_project_exclusion.basic", &exclusion), + testAccCheckLoggingProjectExclusion(&exclusion, "google_logging_project_exclusion.basic")), + }, + { + ResourceName: "google_logging_project_exclusion.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccLoggingProjectExclusion_disablePreservesFilter(t *testing.T) { + t.Parallel() + + exclusionName := "tf-test-exclusion-" + acctest.RandString(10) + + var exclusionBefore, exclusionAfter logging.LogExclusion + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingProjectExclusionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingProjectExclusion_basic(exclusionName), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingProjectExclusionExists("google_logging_project_exclusion.basic", &exclusionBefore), + testAccCheckLoggingProjectExclusion(&exclusionBefore, "google_logging_project_exclusion.basic"), + ), + }, + { + Config: testAccLoggingProjectExclusion_basicDisabled(exclusionName), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingProjectExclusionExists("google_logging_project_exclusion.basic", &exclusionAfter), + testAccCheckLoggingProjectExclusion(&exclusionAfter, "google_logging_project_exclusion.basic"), + ), + }, + { + ResourceName: "google_logging_project_exclusion.basic", + ImportState: true, + ImportStateVerify: true, + }, 
+ }, + }) + + // Description and Disabled should have changed, but Filter should be the same + if exclusionBefore.Description == exclusionAfter.Description { + t.Errorf("Expected Description to change, but it didn't: Description = %#v", exclusionBefore.Description) + } + if exclusionBefore.Filter != exclusionAfter.Filter { + t.Errorf("Expected Filter to be the same, but it differs: before = %#v, after = %#v", + exclusionBefore.Filter, exclusionAfter.Filter) + } + if exclusionBefore.Disabled == exclusionAfter.Disabled { + t.Errorf("Expected Disabled to change, but it didn't: Disabled = %#v", exclusionBefore.Disabled) + } +} + +func TestAccLoggingProjectExclusion_update(t *testing.T) { + t.Parallel() + + exclusionName := "tf-test-exclusion-" + acctest.RandString(10) + + var exclusionBefore, exclusionAfter logging.LogExclusion + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingProjectExclusionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingProjectExclusion_basic(exclusionName), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingProjectExclusionExists("google_logging_project_exclusion.basic", &exclusionBefore), + testAccCheckLoggingProjectExclusion(&exclusionBefore, "google_logging_project_exclusion.basic"), + ), + }, + { + Config: testAccLoggingProjectExclusion_basicUpdated(exclusionName), + Check: resource.ComposeTestCheckFunc( + testAccCheckLoggingProjectExclusionExists("google_logging_project_exclusion.basic", &exclusionAfter), + testAccCheckLoggingProjectExclusion(&exclusionAfter, "google_logging_project_exclusion.basic"), + ), + }, + { + ResourceName: "google_logging_project_exclusion.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) + + // Filter should have changed, but Description and Disabled should be the same + if exclusionBefore.Description != exclusionAfter.Description { + t.Errorf("Expected Description to be 
the same, but it differs: before = %#v, after = %#v", + exclusionBefore.Description, exclusionAfter.Description) + } + if exclusionBefore.Filter == exclusionAfter.Filter { + t.Errorf("Expected Filter to change, but it didn't: Filter = %#v", exclusionBefore.Filter) + } + if exclusionBefore.Disabled != exclusionAfter.Disabled { + t.Errorf("Expected Disabled to be the same, but it differs: before = %#v, after = %#v", + exclusionBefore.Disabled, exclusionAfter.Disabled) + } +} + +func testAccCheckLoggingProjectExclusionDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_logging_project_exclusion" { + continue + } + + attributes := rs.Primary.Attributes + + _, err := config.clientLogging.Projects.Exclusions.Get(attributes["id"]).Do() + if err == nil { + return fmt.Errorf("project exclusion still exists") + } + } + + return nil +} + +func testAccCheckLoggingProjectExclusionExists(n string, exclusion *logging.LogExclusion) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := getResourceAttributes(n, s) + if err != nil { + return err + } + config := testAccProvider.Meta().(*Config) + + si, err := config.clientLogging.Projects.Exclusions.Get(attributes["id"]).Do() + if err != nil { + return err + } + *exclusion = *si + + return nil + } +} + +func testAccCheckLoggingProjectExclusion(exclusion *logging.LogExclusion, n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + attributes, err := getResourceAttributes(n, s) + if err != nil { + return err + } + + if exclusion.Description != attributes["description"] { + return fmt.Errorf("mismatch on description: api has %s but client has %s", exclusion.Description, attributes["description"]) + } + + if exclusion.Filter != attributes["filter"] { + return fmt.Errorf("mismatch on filter: api has %s but client has %s", exclusion.Filter, attributes["filter"]) + } + + 
disabledAttribute, err := toBool(attributes["disabled"]) + if err != nil { + return err + } + if exclusion.Disabled != disabledAttribute { + return fmt.Errorf("mismatch on disabled: api has %t but client has %t", exclusion.Disabled, disabledAttribute) + } + + return nil + } +} + +func testAccLoggingProjectExclusion_basic(name string) string { + return fmt.Sprintf(` +resource "google_logging_project_exclusion" "basic" { + name = "%s" + description = "Basic Project Logging Exclusion" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" +}`, name, getTestProjectFromEnv()) +} + +func testAccLoggingProjectExclusion_basicUpdated(name string) string { + return fmt.Sprintf(` +resource "google_logging_project_exclusion" "basic" { + name = "%s" + description = "Basic Project Logging Exclusion" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=INFO" +}`, name, getTestProjectFromEnv()) +} + +func testAccLoggingProjectExclusion_basicDisabled(name string) string { + return fmt.Sprintf(` +resource "google_logging_project_exclusion" "basic" { + name = "%s" + description = "" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" + disabled = true +}`, name, getTestProjectFromEnv()) +} diff --git a/provider/terraform/tests/resource_logging_project_sink_test.go b/provider/terraform/tests/resource_logging_project_sink_test.go new file mode 100644 index 000000000000..5863a71af231 --- /dev/null +++ b/provider/terraform/tests/resource_logging_project_sink_test.go @@ -0,0 +1,179 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccLoggingProjectSink_basic(t *testing.T) { + t.Parallel() + + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + 
acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingProjectSinkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingProjectSink_basic(sinkName, getTestProjectFromEnv(), bucketName), + }, + { + ResourceName: "google_logging_project_sink.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccLoggingProjectSink_updatePreservesUniqueWriter(t *testing.T) { + t.Parallel() + + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + updatedBucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingProjectSinkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingProjectSink_uniqueWriter(sinkName, bucketName), + }, + { + ResourceName: "google_logging_project_sink.unique_writer", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccLoggingProjectSink_uniqueWriterUpdated(sinkName, updatedBucketName), + }, + { + ResourceName: "google_logging_project_sink.unique_writer", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccLoggingProjectSink_heredoc(t *testing.T) { + t.Parallel() + + sinkName := "tf-test-sink-" + acctest.RandString(10) + bucketName := "tf-test-sink-bucket-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckLoggingProjectSinkDestroy, + Steps: []resource.TestStep{ + { + Config: testAccLoggingProjectSink_heredoc(sinkName, getTestProjectFromEnv(), bucketName), + }, + { + ResourceName: "google_logging_project_sink.heredoc", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func 
testAccCheckLoggingProjectSinkDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_logging_project_sink" { + continue + } + + attributes := rs.Primary.Attributes + + _, err := config.clientLogging.Projects.Sinks.Get(attributes["id"]).Do() + if err == nil { + return fmt.Errorf("project sink still exists") + } + } + + return nil +} + +func testAccLoggingProjectSink_basic(name, project, bucketName string) string { + return fmt.Sprintf(` +resource "google_logging_project_sink" "basic" { + name = "%s" + project = "%s" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" + + unique_writer_identity = false +} + +resource "google_storage_bucket" "log-bucket" { + name = "%s" +} +`, name, project, project, bucketName) +} + +func testAccLoggingProjectSink_uniqueWriter(name, bucketName string) string { + return fmt.Sprintf(` +resource "google_logging_project_sink" "unique_writer" { + name = "%s" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=ERROR" + + unique_writer_identity = true +} + +resource "google_storage_bucket" "log-bucket" { + name = "%s" +} +`, name, getTestProjectFromEnv(), bucketName) +} + +func testAccLoggingProjectSink_uniqueWriterUpdated(name, bucketName string) string { + return fmt.Sprintf(` +resource "google_logging_project_sink" "unique_writer" { + name = "%s" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + filter = "logName=\"projects/%s/logs/compute.googleapis.com%%2Factivity_log\" AND severity>=WARNING" + + unique_writer_identity = true +} + +resource "google_storage_bucket" "log-bucket" { + name = "%s" +} +`, name, getTestProjectFromEnv(), bucketName) +} + +func 
testAccLoggingProjectSink_heredoc(name, project, bucketName string) string { + return fmt.Sprintf(` +resource "google_logging_project_sink" "heredoc" { + name = "%s" + project = "%s" + destination = "storage.googleapis.com/${google_storage_bucket.log-bucket.name}" + + filter = <=ERROR + + EOS + + unique_writer_identity = false +} + +resource "google_storage_bucket" "log-bucket" { + name = "%s" +} +`, name, project, project, bucketName) +} diff --git a/provider/terraform/tests/resource_pubsub_subscription_iam_test.go b/provider/terraform/tests/resource_pubsub_subscription_iam_test.go new file mode 100644 index 000000000000..239bad8d0c3f --- /dev/null +++ b/provider/terraform/tests/resource_pubsub_subscription_iam_test.go @@ -0,0 +1,250 @@ +package google + +import ( + "fmt" + "reflect" + "sort" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccPubsubSubscriptionIamBinding(t *testing.T) { + t.Parallel() + + topic := "test-topic-iam-" + acctest.RandString(10) + subscription := "test-subscription-iam-" + acctest.RandString(10) + account := "test-iam-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test IAM Binding creation + Config: testAccPubsubSubscriptionIamBinding_basic(subscription, topic, account), + Check: testAccCheckPubsubSubscriptionIam(subscription, "roles/pubsub.subscriber", []string{ + fmt.Sprintf("serviceAccount:%s-1@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + // Test IAM Binding update + Config: testAccPubsubSubscriptionIamBinding_update(subscription, topic, account), + Check: testAccCheckPubsubSubscriptionIam(subscription, "roles/pubsub.subscriber", []string{ + fmt.Sprintf("serviceAccount:%s-1@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + 
fmt.Sprintf("serviceAccount:%s-2@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + ResourceName: "google_pubsub_subscription_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s roles/pubsub.subscriber", getComputedSubscriptionName(getTestProjectFromEnv(), subscription)), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccPubsubSubscriptionIamMember(t *testing.T) { + t.Parallel() + + topic := "test-topic-iam-" + acctest.RandString(10) + subscription := "test-subscription-iam-" + acctest.RandString(10) + account := "test-iam-" + acctest.RandString(10) + accountEmail := fmt.Sprintf("%s@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test Iam Member creation (no update for member, no need to test) + Config: testAccPubsubSubscriptionIamMember_basic(subscription, topic, account), + Check: testAccCheckPubsubSubscriptionIam(subscription, "roles/pubsub.subscriber", []string{ + fmt.Sprintf("serviceAccount:%s", accountEmail), + }), + }, + { + ResourceName: "google_pubsub_subscription_iam_member.foo", + ImportStateId: fmt.Sprintf("%s roles/pubsub.subscriber serviceAccount:%s", getComputedSubscriptionName(getTestProjectFromEnv(), subscription), accountEmail), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccPubsubSubscriptionIamPolicy(t *testing.T) { + t.Parallel() + + topic := "test-topic-iam-" + acctest.RandString(10) + subscription := "test-subscription-iam-" + acctest.RandString(10) + account := "test-iam-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccPubsubSubscriptionIamPolicy_basic(subscription, topic, account, "roles/pubsub.subscriber"), + Check: 
testAccCheckPubsubSubscriptionIam(subscription, "roles/pubsub.subscriber", []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + Config: testAccPubsubSubscriptionIamPolicy_basic(subscription, topic, account, "roles/pubsub.viewer"), + Check: testAccCheckPubsubSubscriptionIam(subscription, "roles/pubsub.viewer", []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + ResourceName: "google_pubsub_subscription_iam_policy.foo", + ImportStateId: getComputedSubscriptionName(getTestProjectFromEnv(), subscription), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckPubsubSubscriptionIam(subscription, role string, members []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + p, err := config.clientPubsub.Projects.Subscriptions.GetIamPolicy(getComputedSubscriptionName(getTestProjectFromEnv(), subscription)).Do() + if err != nil { + return err + } + + for _, binding := range p.Bindings { + if binding.Role == role { + sort.Strings(members) + sort.Strings(binding.Members) + + if reflect.DeepEqual(members, binding.Members) { + return nil + } + + return fmt.Errorf("Binding found but expected members is %v, got %v", members, binding.Members) + } + } + + return fmt.Errorf("No binding for role %q", role) + } +} + +func testAccPubsubSubscriptionIamBinding_basic(subscription, topic, account string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "topic" { + name = "%s" +} + +resource "google_pubsub_subscription" "subscription" { + name = "%s" + topic = "${google_pubsub_topic.topic.id}" +} + +resource "google_service_account" "test-account-1" { + account_id = "%s-1" + display_name = "Iam Testing Account" +} + +resource "google_pubsub_subscription_iam_binding" "foo" { + subscription = "${google_pubsub_subscription.subscription.id}" + 
role = "roles/pubsub.subscriber" + members = [ + "serviceAccount:${google_service_account.test-account-1.email}", + ] +} +`, topic, subscription, account) +} + +func testAccPubsubSubscriptionIamBinding_update(subscription, topic, account string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "topic" { + name = "%s" +} + +resource "google_pubsub_subscription" "subscription" { + name = "%s" + topic = "${google_pubsub_topic.topic.id}" +} + + +resource "google_service_account" "test-account-1" { + account_id = "%s-1" + display_name = "Iam Testing Account" +} + +resource "google_service_account" "test-account-2" { + account_id = "%s-2" + display_name = "Iam Testing Account" +} + +resource "google_pubsub_subscription_iam_binding" "foo" { + subscription = "${google_pubsub_subscription.subscription.id}" + role = "roles/pubsub.subscriber" + members = [ + "serviceAccount:${google_service_account.test-account-1.email}", + "serviceAccount:${google_service_account.test-account-2.email}", + ] +} +`, topic, subscription, account, account) +} + +func testAccPubsubSubscriptionIamMember_basic(subscription, topic, account string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "topic" { + name = "%s" +} + +resource "google_pubsub_subscription" "subscription" { + name = "%s" + topic = "${google_pubsub_topic.topic.id}" +} + + +resource "google_service_account" "test-account" { + account_id = "%s" + display_name = "Iam Testing Account" +} + +resource "google_pubsub_subscription_iam_member" "foo" { + subscription = "${google_pubsub_subscription.subscription.id}" + role = "roles/pubsub.subscriber" + member = "serviceAccount:${google_service_account.test-account.email}" +} +`, topic, subscription, account) +} + +func testAccPubsubSubscriptionIamPolicy_basic(subscription, topic, account, role string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "topic" { + name = "%s" +} + +resource "google_pubsub_subscription" "subscription" { + name = 
"%s" + topic = "${google_pubsub_topic.topic.id}" +} + + +resource "google_service_account" "test-account" { + account_id = "%s" + display_name = "Iam Testing Account" +} + +data "google_iam_policy" "foo" { + binding { + role = "%s" + members = ["serviceAccount:${google_service_account.test-account.email}"] + } +} + +resource "google_pubsub_subscription_iam_policy" "foo" { + subscription = "${google_pubsub_subscription.subscription.id}" + policy_data = "${data.google_iam_policy.foo.policy_data}" +} +`, topic, subscription, account, role) +} diff --git a/provider/terraform/tests/resource_pubsub_subscription_test.go b/provider/terraform/tests/resource_pubsub_subscription_test.go new file mode 100644 index 000000000000..d813680b4f93 --- /dev/null +++ b/provider/terraform/tests/resource_pubsub_subscription_test.go @@ -0,0 +1,103 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccPubsubSubscription_basic(t *testing.T) { + t.Parallel() + + topic := fmt.Sprintf("tf-test-topic-%s", acctest.RandString(10)) + subscription := fmt.Sprintf("tf-test-sub-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckPubsubSubscriptionDestroy, + Steps: []resource.TestStep{ + { + Config: testAccPubsubSubscription_basic(topic, subscription), + }, + resource.TestStep{ + ResourceName: "google_pubsub_subscription.foo", + ImportStateId: subscription, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +// TODO: Add acceptance test for push delivery. +// +// Testing push endpoints is tricky for the following reason: +// - You need a publicly accessible HTTPS server to handle POST requests in order to receive push messages. 
+// - The server must present a valid SSL certificate signed by a certificate authority +// - The server must be routable by DNS. +// - You also need to validate that you own the domain (or have equivalent access to the endpoint). +// - Finally, you must register the endpoint domain with the GCP project. +// +// An easy way to test this would be to create an App Engine Hello World app. With AppEngine, SSL certificate, DNS and domain registry is handled for us. +// App Engine is not yet supported by Terraform but once it is, it will provide an easy path to testing push configs. +// Another option would be to use Cloud Functions once Terraform support is added. +func testAccCheckPubsubSubscriptionDestroy(s *terraform.State) error { + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_pubsub_subscription" { + continue + } + + config := testAccProvider.Meta().(*Config) + sub, _ := config.clientPubsub.Projects.Subscriptions.Get(rs.Primary.ID).Do() + if sub != nil { + return fmt.Errorf("Subscription still present") + } + } + + return nil +} + +func testAccPubsubSubscription_basic(topic, subscription string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "foo" { + name = "%s" +} + +resource "google_pubsub_subscription" "foo" { + name = "%s" + topic = "${google_pubsub_topic.foo.name}" + ack_deadline_seconds = 20 +}`, topic, subscription) +} + +func TestGetComputedTopicName(t *testing.T) { + type testData struct { + project string + topic string + expected string + } + + var testCases = []testData{ + testData{ + project: "my-project", + topic: "my-topic", + expected: "projects/my-project/topics/my-topic", + }, + testData{ + project: "my-project", + topic: "projects/another-project/topics/my-topic", + expected: "projects/another-project/topics/my-topic", + }, + } + + for _, testCase := range testCases { + computedTopicName := getComputedTopicName(testCase.project, testCase.topic) + if computedTopicName != testCase.expected { + 
t.Fatalf("bad computed topic name: %s' => expected %s", computedTopicName, testCase.expected) + } + } +} diff --git a/provider/terraform/tests/resource_pubsub_topic_iam_test.go b/provider/terraform/tests/resource_pubsub_topic_iam_test.go new file mode 100644 index 000000000000..8a1d61652bd3 --- /dev/null +++ b/provider/terraform/tests/resource_pubsub_topic_iam_test.go @@ -0,0 +1,276 @@ +package google + +import ( + "fmt" + "reflect" + "sort" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccPubsubTopicIamBinding(t *testing.T) { + t.Parallel() + + topic := "test-topic-iam-" + acctest.RandString(10) + account := "test-topic-iam-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test IAM Binding creation + Config: testAccPubsubTopicIamBinding_basic(topic, account), + Check: testAccCheckPubsubTopicIam(topic, "roles/pubsub.publisher", []string{ + fmt.Sprintf("serviceAccount:%s-1@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + ResourceName: "google_pubsub_topic_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s roles/pubsub.publisher", getComputedTopicName(getTestProjectFromEnv(), topic)), + ImportState: true, + ImportStateVerify: true, + }, + { + // Test IAM Binding update + Config: testAccPubsubTopicIamBinding_update(topic, account), + Check: testAccCheckPubsubTopicIam(topic, "roles/pubsub.publisher", []string{ + fmt.Sprintf("serviceAccount:%s-1@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + fmt.Sprintf("serviceAccount:%s-2@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + ResourceName: "google_pubsub_topic_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s roles/pubsub.publisher", getComputedTopicName(getTestProjectFromEnv(), topic)), 
+ ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccPubsubTopicIamBinding_topicName(t *testing.T) { + t.Parallel() + + topic := "test-topic-iam-" + acctest.RandString(10) + account := "test-topic-iam-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test IAM Binding creation + Config: testAccPubsubTopicIamBinding_topicName(topic, account), + Check: testAccCheckPubsubTopicIam(topic, "roles/pubsub.publisher", []string{ + fmt.Sprintf("serviceAccount:%s-1@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + // No import step- imports want the resource to be defined using the full id as the topic + }, + }) +} + +func TestAccPubsubTopicIamMember(t *testing.T) { + t.Parallel() + + topic := "test-topic-iam-" + acctest.RandString(10) + account := "test-topic-iam-" + acctest.RandString(10) + accountEmail := fmt.Sprintf("%s@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test Iam Member creation (no update for member, no need to test) + Config: testAccPubsubTopicIamMember_basic(topic, account), + Check: testAccCheckPubsubTopicIam(topic, "roles/pubsub.publisher", []string{ + fmt.Sprintf("serviceAccount:%s", accountEmail), + }), + }, + { + ResourceName: "google_pubsub_topic_iam_member.foo", + ImportStateId: fmt.Sprintf("%s roles/pubsub.publisher serviceAccount:%s", getComputedTopicName(getTestProjectFromEnv(), topic), accountEmail), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccPubsubTopicIamPolicy(t *testing.T) { + t.Parallel() + + topic := "test-topic-iam-" + acctest.RandString(10) + account := "test-topic-iam-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { 
testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccPubsubTopicIamPolicy_basic(topic, account, "roles/pubsub.publisher"), + Check: testAccCheckPubsubTopicIam(topic, "roles/pubsub.publisher", []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + Config: testAccPubsubTopicIamPolicy_basic(topic, account, "roles/pubsub.subscriber"), + Check: testAccCheckPubsubTopicIam(topic, "roles/pubsub.subscriber", []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + ResourceName: "google_pubsub_topic_iam_policy.foo", + ImportStateId: getComputedTopicName(getTestProjectFromEnv(), topic), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckPubsubTopicIam(topic, role string, members []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + p, err := config.clientPubsub.Projects.Topics.GetIamPolicy(getComputedTopicName(getTestProjectFromEnv(), topic)).Do() + if err != nil { + return err + } + + for _, binding := range p.Bindings { + if binding.Role == role { + sort.Strings(members) + sort.Strings(binding.Members) + + if reflect.DeepEqual(members, binding.Members) { + return nil + } + + return fmt.Errorf("Binding found but expected members is %v, got %v", members, binding.Members) + } + } + + return fmt.Errorf("No binding for role %q", role) + } +} + +func testAccPubsubTopicIamBinding_topicName(topic, account string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "topic" { + name = "%s" +} + +resource "google_service_account" "test-account-1" { + account_id = "%s-1" + display_name = "Iam Testing Account" +} + +resource "google_pubsub_topic_iam_binding" "foo" { + project = "%s" + topic = "${google_pubsub_topic.topic.name}" + role = "roles/pubsub.publisher" + members = [ + 
"serviceAccount:${google_service_account.test-account-1.email}", + ] +} +`, topic, account, getTestProjectFromEnv()) +} + +func testAccPubsubTopicIamBinding_basic(topic, account string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "topic" { + name = "%s" +} + +resource "google_service_account" "test-account-1" { + account_id = "%s-1" + display_name = "Iam Testing Account" +} + +resource "google_pubsub_topic_iam_binding" "foo" { + # use the id instead of the name because it's more compatible with import + topic = "${google_pubsub_topic.topic.id}" + role = "roles/pubsub.publisher" + members = [ + "serviceAccount:${google_service_account.test-account-1.email}", + ] +} +`, topic, account) +} + +func testAccPubsubTopicIamBinding_update(topic, account string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "topic" { + name = "%s" +} + +resource "google_service_account" "test-account-1" { + account_id = "%s-1" + display_name = "Iam Testing Account" +} + +resource "google_service_account" "test-account-2" { + account_id = "%s-2" + display_name = "Iam Testing Account" +} + +resource "google_pubsub_topic_iam_binding" "foo" { + # use the id instead of the name because it's more compatible with import + topic = "${google_pubsub_topic.topic.id}" + role = "roles/pubsub.publisher" + members = [ + "serviceAccount:${google_service_account.test-account-1.email}", + "serviceAccount:${google_service_account.test-account-2.email}", + ] +} +`, topic, account, account) +} + +func testAccPubsubTopicIamMember_basic(topic, account string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "topic" { + name = "%s" +} + +resource "google_service_account" "test-account" { + account_id = "%s" + display_name = "Iam Testing Account" +} + +resource "google_pubsub_topic_iam_member" "foo" { + topic = "${google_pubsub_topic.topic.id}" + role = "roles/pubsub.publisher" + member = "serviceAccount:${google_service_account.test-account.email}" +} +`, topic, 
account) +} + +func testAccPubsubTopicIamPolicy_basic(topic, account, role string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "topic" { + name = "%s" +} + +resource "google_service_account" "test-account" { + account_id = "%s" + display_name = "Iam Testing Account" +} + +data "google_iam_policy" "foo" { + binding { + role = "%s" + members = ["serviceAccount:${google_service_account.test-account.email}"] + } +} + +resource "google_pubsub_topic_iam_policy" "foo" { + topic = "${google_pubsub_topic.topic.id}" + policy_data = "${data.google_iam_policy.foo.policy_data}" +} +`, topic, account, role) +} diff --git a/provider/terraform/tests/resource_pubsub_topic_test.go b/provider/terraform/tests/resource_pubsub_topic_test.go new file mode 100644 index 000000000000..7fd62ba591e9 --- /dev/null +++ b/provider/terraform/tests/resource_pubsub_topic_test.go @@ -0,0 +1,65 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccPubsubTopic_basic(t *testing.T) { + t.Parallel() + + topicName := acctest.RandomWithPrefix("tf-test-topic") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckPubsubTopicDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccPubsubTopic_basic(topicName), + }, + // Check importing with just the topic name + resource.TestStep{ + ResourceName: "google_pubsub_topic.foo", + ImportStateId: topicName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + // Check importing with the full resource id + resource.TestStep{ + ResourceName: "google_pubsub_topic.foo", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func testAccCheckPubsubTopicDestroy(s 
*terraform.State) error { + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_pubsub_topic" { + continue + } + + config := testAccProvider.Meta().(*Config) + topic, _ := config.clientPubsub.Projects.Topics.Get(rs.Primary.ID).Do() + if topic != nil { + return fmt.Errorf("Topic still present") + } + } + + return nil +} + +func testAccPubsubTopic_basic(name string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "foo" { + name = "%s" +}`, name) +} diff --git a/provider/terraform/tests/resource_resourcemanager_lien_test.go b/provider/terraform/tests/resource_resourcemanager_lien_test.go new file mode 100644 index 000000000000..8e8ff2c915eb --- /dev/null +++ b/provider/terraform/tests/resource_resourcemanager_lien_test.go @@ -0,0 +1,108 @@ +package google + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + resourceManager "google.golang.org/api/cloudresourcemanager/v1" +) + +func TestAccResourceManagerLien_basic(t *testing.T) { + t.Parallel() + + projectName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + org := getTestOrgFromEnv(t) + var lien resourceManager.Lien + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckResourceManagerLienDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccResourceManagerLien_basic(projectName, org), + Check: resource.ComposeTestCheckFunc( + testAccCheckResourceManagerLienExists( + "google_resource_manager_lien.lien", projectName, &lien), + ), + }, + resource.TestStep{ + ResourceName: "google_resource_manager_lien.lien", + ImportState: true, + ImportStateVerify: true, + ImportStateIdFunc: func(_ *terraform.State) (string, error) { + // This has to be a function to close over lien.Name, which is necessary + // because Name is a Computed attribute. 
+ return fmt.Sprintf("%s/%s", + projectName, + strings.Split(lien.Name, "/")[1]), nil + }, + }, + }, + }) +} + +func testAccCheckResourceManagerLienExists(n, projectName string, lien *resourceManager.Lien) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + + found, err := config.clientResourceManager.Liens.List().Parent(fmt.Sprintf("projects/%s", projectName)).Do() + if err != nil { + return err + } + if len(found.Liens) != 1 { + return fmt.Errorf("Lien %s not found", rs.Primary.ID) + } + + *lien = *found.Liens[0] + + return nil + } +} + +func testAccCheckResourceManagerLienDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_resource_manager_lien" { + continue + } + + _, err := config.clientResourceManager.Liens.List().Parent(fmt.Sprintf("projects/%s", rs.Primary.Attributes["parent"])).Do() + if err == nil { + return fmt.Errorf("Lien %s still exists", rs.Primary.ID) + } + } + + return nil +} + +func testAccResourceManagerLien_basic(projectName, org string) string { + return fmt.Sprintf(` +resource "google_project" "project" { + project_id = "%s" + name = "some test project" + org_id = "%s" +} + +resource "google_resource_manager_lien" "lien" { + parent = "projects/${google_project.project.project_id}" + restrictions = ["resourcemanager.projects.delete"] + origin = "something" + reason = "something else" +} +`, projectName, org) +} diff --git a/provider/terraform/tests/resource_runtimeconfig_config_test.go b/provider/terraform/tests/resource_runtimeconfig_config_test.go new file mode 100644 index 000000000000..57ccbdfa3fbf --- /dev/null +++ b/provider/terraform/tests/resource_runtimeconfig_config_test.go @@ -0,0 +1,170 @@ +package google + 
+import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/runtimeconfig/v1beta1" +) + +func TestAccRuntimeconfigConfig_basic(t *testing.T) { + t.Parallel() + + var runtimeConfig runtimeconfig.RuntimeConfig + configName := fmt.Sprintf("runtimeconfig-test-%s", acctest.RandString(10)) + description := "my test description" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckRuntimeconfigConfigDestroy, + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigConfig_basicDescription(configName, description), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeConfigExists( + "google_runtimeconfig_config.foobar", &runtimeConfig), + testAccCheckRuntimeConfigDescription(&runtimeConfig, description), + ), + }, + resource.TestStep{ + ResourceName: "google_runtimeconfig_config.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccRuntimeconfig_update(t *testing.T) { + t.Parallel() + + var runtimeConfig runtimeconfig.RuntimeConfig + configName := fmt.Sprintf("runtimeconfig-test-%s", acctest.RandString(10)) + firstDescription := "my test description" + secondDescription := "my updated test description" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckRuntimeconfigConfigDestroy, + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigConfig_basicDescription(configName, firstDescription), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeConfigExists( + "google_runtimeconfig_config.foobar", &runtimeConfig), + testAccCheckRuntimeConfigDescription(&runtimeConfig, firstDescription), + ), + }, { + Config: testAccRuntimeconfigConfig_basicDescription(configName, secondDescription), + Check: 
resource.ComposeTestCheckFunc( + testAccCheckRuntimeConfigExists( + "google_runtimeconfig_config.foobar", &runtimeConfig), + testAccCheckRuntimeConfigDescription(&runtimeConfig, secondDescription), + ), + }, + }, + }) +} + +func TestAccRuntimeconfig_updateEmptyDescription(t *testing.T) { + t.Parallel() + + var runtimeConfig runtimeconfig.RuntimeConfig + configName := fmt.Sprintf("runtimeconfig-test-%s", acctest.RandString(10)) + description := "my test description" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckRuntimeconfigConfigDestroy, + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigConfig_basicDescription(configName, description), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeConfigExists( + "google_runtimeconfig_config.foobar", &runtimeConfig), + testAccCheckRuntimeConfigDescription(&runtimeConfig, description), + ), + }, { + Config: testAccRuntimeconfigConfig_emptyDescription(configName), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeConfigExists( + "google_runtimeconfig_config.foobar", &runtimeConfig), + testAccCheckRuntimeConfigDescription(&runtimeConfig, ""), + ), + }, + }, + }) +} + +func testAccCheckRuntimeConfigDescription(runtimeConfig *runtimeconfig.RuntimeConfig, description string) resource.TestCheckFunc { + return func(s *terraform.State) error { + if runtimeConfig.Description != description { + return fmt.Errorf("On runtime config '%s', expected description '%s', but found '%s'", + runtimeConfig.Name, description, runtimeConfig.Description) + } + return nil + } +} + +func testAccCheckRuntimeConfigExists(resourceName string, runtimeConfig *runtimeconfig.RuntimeConfig) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is 
set") + } + + config := testAccProvider.Meta().(*Config) + + found, err := config.clientRuntimeconfig.Projects.Configs.Get(rs.Primary.ID).Do() + if err != nil { + return err + } + + *runtimeConfig = *found + + return nil + } +} + +func testAccCheckRuntimeconfigConfigDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_runtimeconfig_config" { + continue + } + + _, err := config.clientRuntimeconfig.Projects.Configs.Get(rs.Primary.ID).Do() + + if err == nil { + return fmt.Errorf("Runtimeconfig still exists") + } + } + + return nil +} + +func testAccRuntimeconfigConfig_basicDescription(name, description string) string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "foobar" { + name = "%s" + description = "%s" +}`, name, description) +} + +func testAccRuntimeconfigConfig_emptyDescription(name string) string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "foobar" { + name = "%s" +}`, name) +} diff --git a/provider/terraform/tests/resource_runtimeconfig_variable_test.go b/provider/terraform/tests/resource_runtimeconfig_variable_test.go new file mode 100644 index 000000000000..b2e83e274b57 --- /dev/null +++ b/provider/terraform/tests/resource_runtimeconfig_variable_test.go @@ -0,0 +1,290 @@ +package google + +import ( + "fmt" + "regexp" + "testing" + "time" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/runtimeconfig/v1beta1" +) + +func TestAccRuntimeconfigVariable_basic(t *testing.T) { + t.Parallel() + + var variable runtimeconfig.Variable + + varName := fmt.Sprintf("variable-test-%s", acctest.RandString(10)) + varText := "this is my test value" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: 
testAccCheckRuntimeconfigVariableDestroy, + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigVariable_basicText(varName, varText), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeconfigVariableExists( + "google_runtimeconfig_variable.foobar", &variable), + testAccCheckRuntimeconfigVariableText(&variable, varText), + testAccCheckRuntimeconfigVariableUpdateTime("google_runtimeconfig_variable.foobar"), + ), + }, + resource.TestStep{ + ResourceName: "google_runtimeconfig_variable.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccRuntimeconfigVariable_basicUpdate(t *testing.T) { + t.Parallel() + + var variable runtimeconfig.Variable + + configName := fmt.Sprintf("some-name-%s", acctest.RandString(10)) + varName := fmt.Sprintf("variable-test-%s", acctest.RandString(10)) + varText := "this is my test value" + varText2 := "this is my updated value" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckRuntimeconfigVariableDestroy, + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigVariable_basicTextUpdate(configName, varName, varText), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeconfigVariableExists( + "google_runtimeconfig_variable.foobar", &variable), + testAccCheckRuntimeconfigVariableText(&variable, varText), + ), + }, { + Config: testAccRuntimeconfigVariable_basicTextUpdate(configName, varName, varText2), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeconfigVariableExists( + "google_runtimeconfig_variable.foobar", &variable), + testAccCheckRuntimeconfigVariableText(&variable, varText2), + ), + }, + }, + }) +} + +func TestAccRuntimeconfigVariable_basicValue(t *testing.T) { + t.Parallel() + + var variable runtimeconfig.Variable + + varName := fmt.Sprintf("variable-test-%s", acctest.RandString(10)) + varValue := "Zm9vYmFyCg==" + + resource.Test(t, resource.TestCase{ + 
PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckRuntimeconfigVariableDestroy, + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigVariable_basicValue(varName, varValue), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeconfigVariableExists( + "google_runtimeconfig_variable.foobar", &variable), + testAccCheckRuntimeconfigVariableValue(&variable, varValue), + testAccCheckRuntimeconfigVariableUpdateTime("google_runtimeconfig_variable.foobar"), + ), + }, + resource.TestStep{ + ResourceName: "google_runtimeconfig_variable.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccRuntimeconfigVariable_errorsOnBothValueAndText(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigVariable_invalidBothTextValue(), + ExpectError: regexp.MustCompile("conflicts with"), + }, + }, + }) +} + +func TestAccRuntimeconfigVariable_errorsOnMissingValueAndText(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigVariable_invalidMissingTextValue(), + ExpectError: regexp.MustCompile("You must specify one of value or text"), + }, + }, + }) +} + +func testAccCheckRuntimeconfigVariableExists(resourceName string, variable *runtimeconfig.Variable) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + + found, err := config.clientRuntimeconfig.Projects.Configs.Variables.Get(rs.Primary.ID).Do() + if err != nil { + return err + } + + 
*variable = *found + + return nil + } +} + +func testAccCheckRuntimeconfigVariableUpdateTime(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + updateTime := rs.Primary.Attributes["update_time"] + if updateTime == "" { + return fmt.Errorf("No update time set for resource %s", resourceName) + } + + // Make sure it's a valid rfc 3339 date + _, err := time.Parse(time.RFC3339, updateTime) + if err != nil { + return fmt.Errorf("Error while parsing update time for resource %s: %s", resourceName, err.Error()) + } + + return nil + } +} + +func testAccCheckRuntimeconfigVariableText(variable *runtimeconfig.Variable, text string) resource.TestCheckFunc { + return func(s *terraform.State) error { + if variable.Text != text { + return fmt.Errorf("Variable %s had incorrect text: expected '%s' but found '%s'", variable.Name, + text, variable.Text) + } + + return nil + } +} + +func testAccCheckRuntimeconfigVariableValue(variable *runtimeconfig.Variable, value string) resource.TestCheckFunc { + return func(s *terraform.State) error { + if variable.Value != value { + return fmt.Errorf("Variable %s had incorrect value: expected '%s' but found '%s'", variable.Name, + value, variable.Value) + } + + return nil + } +} + +func testAccCheckRuntimeconfigVariableDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_runtimeconfig_variable" { + continue + } + + _, err := config.clientRuntimeconfig.Projects.Configs.Variables.Get(rs.Primary.ID).Do() + + if err == nil { + return fmt.Errorf("Runtimeconfig variable still exists") + } + } + + return nil +} + +func testAccRuntimeconfigVariable_basicText(name, text string) string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "foobar" { + name = "some-config-%s" +} + +resource 
"google_runtimeconfig_variable" "foobar" { + parent = "${google_runtimeconfig_config.foobar.name}" + name = "%s" + text = "%s" +}`, acctest.RandString(10), name, text) +} + +func testAccRuntimeconfigVariable_basicTextUpdate(configName, name, text string) string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "foobar" { + name = "%s" +} + +resource "google_runtimeconfig_variable" "foobar" { + parent = "${google_runtimeconfig_config.foobar.name}" + name = "%s" + text = "%s" +}`, configName, name, text) +} + +func testAccRuntimeconfigVariable_basicValue(name, value string) string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "foobar" { + name = "some-config-%s" +} + +resource "google_runtimeconfig_variable" "foobar" { + parent = "${google_runtimeconfig_config.foobar.name}" + name = "%s" + value = "%s" +}`, acctest.RandString(10), name, value) +} + +func testAccRuntimeconfigVariable_invalidBothTextValue() string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "foobar" { + name = "some-config-%s" +} + +resource "google_runtimeconfig_variable" "foobar" { + parent = "${google_runtimeconfig_config.foobar.name}" + name = "%s" + text = "here's my value" + value = "Zm9vYmFyCg==" +}`, acctest.RandString(10), acctest.RandString(10)) +} + +func testAccRuntimeconfigVariable_invalidMissingTextValue() string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "foobar" { + name = "some-config-%s" +} + +resource "google_runtimeconfig_variable" "foobar" { + parent = "${google_runtimeconfig_config.foobar.name}" + name = "my-variable-namespace/%s" +}`, acctest.RandString(10), acctest.RandString(10)) +} diff --git a/provider/terraform/tests/resource_source_repos_repository_test.go b/provider/terraform/tests/resource_source_repos_repository_test.go new file mode 100644 index 000000000000..ad1e74b6f2ed --- /dev/null +++ b/provider/terraform/tests/resource_source_repos_repository_test.go @@ -0,0 +1,88 @@ +package google + 
+import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccSourceRepoRepository_basic(t *testing.T) { + t.Parallel() + + repositoryName := fmt.Sprintf("source-repo-repository-test-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckSourceRepoRepositoryDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccSourceRepoRepository_basic(repositoryName), + Check: resource.ComposeTestCheckFunc( + testAccCheckSourceRepoRepositoryExists( + "google_sourcerepo_repository.acceptance", repositoryName), + ), + }, + resource.TestStep{ + ResourceName: "google_sourcerepo_repository.acceptance", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckSourceRepoRepositoryDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type == "google_sourcerepo_repository" { + repositoryName := buildRepositoryName(config.Project, rs.Primary.Attributes["name"]) + + _, err := config.clientSourceRepo.Projects.Repos.Get(repositoryName).Do() + if err == nil { + return fmt.Errorf(repositoryName + "Source Repository still exists") + } + } + } + + return nil +} + +func testAccCheckSourceRepoRepositoryExists(resourceType, resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceType] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + + repositoryName := buildRepositoryName(config.Project, resourceName) + + resp, err := config.clientSourceRepo.Projects.Repos.Get(repositoryName).Do() + + if err != nil { + return 
fmt.Errorf("Error confirming Source Repository existence: %#v", err) + } + + if resp.Name != repositoryName { + return fmt.Errorf("Failed to verify Source Repository by name") + } + return nil + } +} + +func testAccSourceRepoRepository_basic(repositoryName string) string { + return fmt.Sprintf(` + resource "google_sourcerepo_repository" "acceptance" { + name = "%s" + } + `, repositoryName) +} diff --git a/provider/terraform/tests/resource_spanner_database_iam_test.go b/provider/terraform/tests/resource_spanner_database_iam_test.go new file mode 100644 index 000000000000..058997dbc046 --- /dev/null +++ b/provider/terraform/tests/resource_spanner_database_iam_test.go @@ -0,0 +1,246 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" +) + +func TestAccSpannerDatabaseIamBinding(t *testing.T) { + t.Parallel() + + account := acctest.RandomWithPrefix("tf-test") + role := "roles/spanner.databaseAdmin" + project := getTestProjectFromEnv() + database := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + instance := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccSpannerDatabaseIamBinding_basic(account, instance, database, role), + }, + resource.TestStep{ + ResourceName: "google_spanner_database_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s %s", spannerDatabaseId{ + Project: project, + Instance: instance, + Database: database, + }.terraformId(), role), + ImportState: true, + ImportStateVerify: true, + }, + { + // Test Iam Binding update + Config: testAccSpannerDatabaseIamBinding_update(account, instance, database, role), + }, + resource.TestStep{ + ResourceName: "google_spanner_database_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s %s", spannerDatabaseId{ + Project: project, + Instance: instance, + 
Database: database, + }.terraformId(), role), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccSpannerDatabaseIamMember(t *testing.T) { + t.Parallel() + + project := getTestProjectFromEnv() + account := acctest.RandomWithPrefix("tf-test") + role := "roles/spanner.databaseAdmin" + database := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + instance := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test Iam Member creation (no update for member, no need to test) + Config: testAccSpannerDatabaseIamMember_basic(account, instance, database, role), + }, + resource.TestStep{ + ResourceName: "google_spanner_database_iam_member.foo", + ImportStateId: fmt.Sprintf("%s %s serviceAccount:%s@%s.iam.gserviceaccount.com", spannerDatabaseId{ + Instance: instance, + Database: database, + Project: project, + }.terraformId(), role, account, project), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccSpannerDatabaseIamPolicy(t *testing.T) { + t.Parallel() + + project := getTestProjectFromEnv() + account := acctest.RandomWithPrefix("tf-test") + role := "roles/spanner.databaseAdmin" + database := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + instance := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccSpannerDatabaseIamPolicy_basic(account, instance, database, role), + }, + // Test a few import formats + resource.TestStep{ + ResourceName: "google_spanner_database_iam_policy.foo", + ImportStateId: fmt.Sprintf("%s", spannerDatabaseId{ + Instance: instance, + Database: database, + Project: project, + }.terraformId()), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func 
testAccSpannerDatabaseIamBinding_basic(account, instance, database, roleId string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + account_id = "%s" + display_name = "Spanner Iam Testing Account" +} + +resource "google_spanner_instance" "instance" { + name = "%s" + config = "regional-us-central1" + display_name = "%s" + num_nodes = 1 +} + +resource "google_spanner_database" "database" { + instance = "${google_spanner_instance.instance.name}" + name = "%s" +} + +resource "google_spanner_database_iam_binding" "foo" { + project = "${google_spanner_database.database.project}" + database = "${google_spanner_database.database.name}" + instance = "${google_spanner_database.database.instance}" + role = "%s" + members = ["serviceAccount:${google_service_account.test_account.email}"] +} +`, account, instance, instance, database, roleId) +} + +func testAccSpannerDatabaseIamBinding_update(account, instance, database, roleId string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + account_id = "%s" + display_name = "Spanner Iam Testing Account" +} + +resource "google_service_account" "test_account_2" { + account_id = "%s-2" + display_name = "Spanner Iam Testing Account" +} + +resource "google_spanner_instance" "instance" { + name = "%s" + config = "regional-us-central1" + display_name = "%s" + num_nodes = 1 +} + +resource "google_spanner_database" "database" { + instance = "${google_spanner_instance.instance.name}" + name = "%s" +} + +resource "google_spanner_database_iam_binding" "foo" { + project = "${google_spanner_database.database.project}" + database = "${google_spanner_database.database.name}" + instance = "${google_spanner_database.database.instance}" + role = "%s" + members = [ + "serviceAccount:${google_service_account.test_account.email}", + "serviceAccount:${google_service_account.test_account_2.email}" + ] +} +`, account, account, instance, instance, database, roleId) +} + +func 
testAccSpannerDatabaseIamMember_basic(account, instance, database, roleId string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + account_id = "%s" + display_name = "Spanner Iam Testing Account" +} + +resource "google_spanner_instance" "instance" { + name = "%s" + config = "regional-us-central1" + display_name = "%s" + num_nodes = 1 +} + +resource "google_spanner_database" "database" { + instance = "${google_spanner_instance.instance.name}" + name = "%s" +} + +resource "google_spanner_database_iam_member" "foo" { + project = "${google_spanner_database.database.project}" + database = "${google_spanner_database.database.name}" + instance = "${google_spanner_database.database.instance}" + role = "%s" + member = "serviceAccount:${google_service_account.test_account.email}" +} +`, account, instance, instance, database, roleId) +} + +func testAccSpannerDatabaseIamPolicy_basic(account, instance, database, roleId string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + account_id = "%s" + display_name = "Spanner Iam Testing Account" +} + +resource "google_spanner_instance" "instance" { + name = "%s" + config = "regional-us-central1" + display_name = "%s" + num_nodes = 1 +} + +resource "google_spanner_database" "database" { + instance = "${google_spanner_instance.instance.name}" + name = "%s" +} + +data "google_iam_policy" "foo" { + binding { + role = "%s" + + members = ["serviceAccount:${google_service_account.test_account.email}"] + } +} + +resource "google_spanner_database_iam_policy" "foo" { + project = "${google_spanner_database.database.project}" + database = "${google_spanner_database.database.name}" + instance = "${google_spanner_database.database.instance}" + policy_data = "${data.google_iam_policy.foo.policy_data}" +} +`, account, instance, instance, database, roleId) +} diff --git a/provider/terraform/tests/resource_spanner_database_test.go 
b/provider/terraform/tests/resource_spanner_database_test.go new file mode 100644 index 000000000000..73e3f71f669d --- /dev/null +++ b/provider/terraform/tests/resource_spanner_database_test.go @@ -0,0 +1,237 @@ +package google + +import ( + "fmt" + "net/http" + "strings" + "testing" + + "github.com/hashicorp/errwrap" + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + + "google.golang.org/api/googleapi" +) + +// Unit Tests + +func TestDatabaseNameForApi(t *testing.T) { + id := spannerDatabaseId{ + Project: "project123", + Instance: "instance456", + Database: "db789", + } + actual := id.databaseUri() + expected := "projects/project123/instances/instance456/databases/db789" + expectEquals(t, expected, actual) +} + +func TestImportSpannerDatabaseId_InstanceDB(t *testing.T) { + id, e := importSpannerDatabaseId("instance456/database789") + if e != nil { + t.Errorf("Error should have been nil") + } + expectEquals(t, "", id.Project) + expectEquals(t, "instance456", id.Instance) + expectEquals(t, "database789", id.Database) +} + +func TestImportSpannerDatabaseId_ProjectInstanceDB(t *testing.T) { + id, e := importSpannerDatabaseId("project123/instance456/database789") + if e != nil { + t.Errorf("Error should have been nil") + } + expectEquals(t, "project123", id.Project) + expectEquals(t, "instance456", id.Instance) + expectEquals(t, "database789", id.Database) +} + +func TestImportSpannerDatabaseId_projectId(t *testing.T) { + shouldPass := []string{ + "project-id/instance/database", + "123123/instance/123", + "hashicorptest.net:project-123/instance/123", + "123/456/789", + } + + shouldFail := []string{ + "project-id#/instance/database", + "project-id/instance#/database", + "project-id/instance/database#", + "hashicorptest.net:project-123:invalid:project/instance/123", + "hashicorptest.net:/instance/123", + } + + for _, element := range shouldPass { + _, e := 
importSpannerDatabaseId(element) + if e != nil { + t.Error("importSpannerDatabaseId should pass on '" + element + "' but doesn't") + } + } + + for _, element := range shouldFail { + _, e := importSpannerDatabaseId(element) + if e == nil { + t.Error("importSpannerDatabaseId should fail on '" + element + "' but doesn't") + } + } +} + +func TestImportSpannerDatabaseId_invalidLeadingSlash(t *testing.T) { + id, e := importSpannerDatabaseId("/instance456/database789") + expectInvalidSpannerDbImportId(t, id, e) +} + +func TestImportSpannerDatabaseId_invalidTrailingSlash(t *testing.T) { + id, e := importSpannerDatabaseId("instance456/database789/") + expectInvalidSpannerDbImportId(t, id, e) +} + +func TestImportSpannerDatabaseId_invalidSingleSlash(t *testing.T) { + id, e := importSpannerDatabaseId("/") + expectInvalidSpannerDbImportId(t, id, e) +} + +func TestImportSpannerDatabaseId_invalidMultiSlash(t *testing.T) { + id, e := importSpannerDatabaseId("project123/instance456/db789/next") + expectInvalidSpannerDbImportId(t, id, e) +} + +func expectInvalidSpannerDbImportId(t *testing.T, id *spannerDatabaseId, e error) { + if id != nil { + t.Errorf("Expected spannerDatabaseId to be nil") + return + } + if e == nil { + t.Errorf("Expected an Error but did not get one") + return + } + if !strings.HasPrefix(e.Error(), "Invalid spanner database specifier") { + t.Errorf("Expecting Error starting with 'Invalid spanner database specifier'") + } +} + +// Acceptance Tests + +func TestAccSpannerDatabase_basic(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckSpannerDatabaseDestroy, + Steps: []resource.TestStep{ + { + Config: testAccSpannerDatabase_basic(rnd), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), + ), + }, + { + ResourceName: 
"google_spanner_database.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccSpannerDatabase_basicWithInitialDDL(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(10) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckSpannerDatabaseDestroy, + Steps: []resource.TestStep{ + { + Config: testAccSpannerDatabase_basicWithInitialDDL(rnd), + }, + { + ResourceName: "google_spanner_database.basic", + ImportState: true, + ImportStateVerify: true, + // DDL statements get issued at the time the create/update + // occurs, which means storing them in state isn't really + // necessary. + ImportStateVerifyIgnore: []string{"ddl"}, + }, + }, + }) +} + +func testAccCheckSpannerDatabaseDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_spanner_database" { + continue + } + + if rs.Primary.ID == "" { + return fmt.Errorf("Unable to verify delete of spanner database, ID is empty") + } + + project, err := getTestProject(rs.Primary, config) + if err != nil { + return err + } + + id := spannerDatabaseId{ + Project: project, + Instance: rs.Primary.Attributes["instance"], + Database: rs.Primary.Attributes["name"], + } + _, err = config.clientSpanner.Projects.Instances.Databases.Get( + id.databaseUri()).Do() + + if err == nil { + return fmt.Errorf("Spanner database still exists") + } + + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == http.StatusNotFound { + return nil + } + return errwrap.Wrapf("Error verifying spanner database deleted: {{err}}", err) + } + + return nil +} + +func testAccSpannerDatabase_basic(rnd string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "my-instance-%s" + config = "regional-us-central1" + display_name = "my-displayname-%s" + num_nodes = 1 +} + +resource 
"google_spanner_database" "basic" { + instance = "${google_spanner_instance.basic.name}" + name = "my-db-%s" +} +`, rnd, rnd, rnd) +} + +func testAccSpannerDatabase_basicWithInitialDDL(rnd string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "my-instance-%s" + config = "regional-us-central1" + display_name = "my-displayname-%s" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = "${google_spanner_instance.basic.name}" + name = "my-db-%s" + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)" ] +} +`, rnd, rnd, rnd) +} diff --git a/provider/terraform/tests/resource_spanner_instance_iam_test.go b/provider/terraform/tests/resource_spanner_instance_iam_test.go new file mode 100644 index 000000000000..0609c8fc00ae --- /dev/null +++ b/provider/terraform/tests/resource_spanner_instance_iam_test.go @@ -0,0 +1,215 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" +) + +func TestAccSpannerInstanceIamBinding(t *testing.T) { + t.Parallel() + + account := acctest.RandomWithPrefix("tf-test") + role := "roles/spanner.databaseAdmin" + project := getTestProjectFromEnv() + instance := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccSpannerInstanceIamBinding_basic(account, instance, role), + }, + resource.TestStep{ + ResourceName: "google_spanner_instance_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s %s", spannerInstanceId{ + Project: project, + Instance: instance, + }.terraformId(), role), + ImportState: true, + ImportStateVerify: true, + }, + { + // Test Iam Binding update + Config: testAccSpannerInstanceIamBinding_update(account, instance, role), + }, + resource.TestStep{ 
+ ResourceName: "google_spanner_instance_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s %s", spannerInstanceId{ + Project: project, + Instance: instance, + }.terraformId(), role), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccSpannerInstanceIamMember(t *testing.T) { + t.Parallel() + + project := getTestProjectFromEnv() + account := acctest.RandomWithPrefix("tf-test") + role := "roles/spanner.databaseAdmin" + instance := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test Iam Member creation (no update for member, no need to test) + Config: testAccSpannerInstanceIamMember_basic(account, instance, role), + }, + resource.TestStep{ + ResourceName: "google_spanner_instance_iam_member.foo", + ImportStateId: fmt.Sprintf("%s %s serviceAccount:%s@%s.iam.gserviceaccount.com", spannerInstanceId{ + Instance: instance, + Project: project, + }.terraformId(), role, account, project), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccSpannerInstanceIamPolicy(t *testing.T) { + t.Parallel() + + project := getTestProjectFromEnv() + account := acctest.RandomWithPrefix("tf-test") + role := "roles/spanner.databaseAdmin" + instance := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccSpannerInstanceIamPolicy_basic(account, instance, role), + }, + // Test a few import formats + resource.TestStep{ + ResourceName: "google_spanner_instance_iam_policy.foo", + ImportStateId: fmt.Sprintf("%s", spannerInstanceId{ + Instance: instance, + Project: project, + }.terraformId()), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccSpannerInstanceIamBinding_basic(account, instance, roleId 
string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + account_id = "%s" + display_name = "Spanner Iam Testing Account" +} + +resource "google_spanner_instance" "instance" { + name = "%s" + config = "regional-us-central1" + display_name = "%s" + num_nodes = 1 +} + +resource "google_spanner_instance_iam_binding" "foo" { + project = "${google_spanner_instance.instance.project}" + instance = "${google_spanner_instance.instance.name}" + role = "%s" + members = ["serviceAccount:${google_service_account.test_account.email}"] +} +`, account, instance, instance, roleId) +} + +func testAccSpannerInstanceIamBinding_update(account, instance, roleId string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + account_id = "%s" + display_name = "Spanner Iam Testing Account" +} + +resource "google_service_account" "test_account_2" { + account_id = "%s-2" + display_name = "Spanner Iam Testing Account" +} + +resource "google_spanner_instance" "instance" { + name = "%s" + config = "regional-us-central1" + display_name = "%s" + num_nodes = 1 +} + +resource "google_spanner_instance_iam_binding" "foo" { + project = "${google_spanner_instance.instance.project}" + instance = "${google_spanner_instance.instance.name}" + role = "%s" + members = [ + "serviceAccount:${google_service_account.test_account.email}", + "serviceAccount:${google_service_account.test_account_2.email}" + ] +} +`, account, account, instance, instance, roleId) +} + +func testAccSpannerInstanceIamMember_basic(account, instance, roleId string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + account_id = "%s" + display_name = "Spanner Iam Testing Account" +} + +resource "google_spanner_instance" "instance" { + name = "%s" + config = "regional-us-central1" + display_name = "%s" + num_nodes = 1 +} + +resource "google_spanner_instance_iam_member" "foo" { + project = "${google_spanner_instance.instance.project}" + 
instance = "${google_spanner_instance.instance.name}" + role = "%s" + member = "serviceAccount:${google_service_account.test_account.email}" +} +`, account, instance, instance, roleId) +} + +func testAccSpannerInstanceIamPolicy_basic(account, instance, roleId string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + account_id = "%s" + display_name = "Spanner Iam Testing Account" +} + +resource "google_spanner_instance" "instance" { + name = "%s" + config = "regional-us-central1" + display_name = "%s" + num_nodes = 1 +} + +data "google_iam_policy" "foo" { + binding { + role = "%s" + + members = ["serviceAccount:${google_service_account.test_account.email}"] + } +} + +resource "google_spanner_instance_iam_policy" "foo" { + project = "${google_spanner_instance.instance.project}" + instance = "${google_spanner_instance.instance.name}" + policy_data = "${data.google_iam_policy.foo.policy_data}" +} +`, account, instance, instance, roleId) +} diff --git a/provider/terraform/tests/resource_spanner_instance_test.go b/provider/terraform/tests/resource_spanner_instance_test.go new file mode 100644 index 000000000000..f26c86b29d1c --- /dev/null +++ b/provider/terraform/tests/resource_spanner_instance_test.go @@ -0,0 +1,301 @@ +package google + +import ( + "fmt" + "net/http" + "testing" + + "github.com/hashicorp/errwrap" + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + + "strings" + + "google.golang.org/api/googleapi" +) + +// Unit Tests + +func TestSpannerInstanceId_instanceUri(t *testing.T) { + id := spannerInstanceId{ + Project: "project123", + Instance: "instance456", + } + actual := id.instanceUri() + expected := "projects/project123/instances/instance456" + expectEquals(t, expected, actual) +} + +func TestSpannerInstanceId_instanceConfigUri(t *testing.T) { + id := spannerInstanceId{ + Project: "project123", + Instance: "instance456", + } 
+ actual := id.instanceConfigUri("conf987") + expected := "projects/project123/instanceConfigs/conf987" + expectEquals(t, expected, actual) +} + +func TestSpannerInstanceId_parentProjectUri(t *testing.T) { + id := spannerInstanceId{ + Project: "project123", + Instance: "instance456", + } + actual := id.parentProjectUri() + expected := "projects/project123" + expectEquals(t, expected, actual) +} + +func TestGenSpannerInstanceName(t *testing.T) { + s := genSpannerInstanceName() + if len(s) != 30 { + t.Fatalf("Expected a 30 char ID to be generated, instead found %d chars", len(s)) + } +} + +func TestImportSpannerInstanceId(t *testing.T) { + sid, e := importSpannerInstanceId("instance456") + if e != nil { + t.Errorf("Error should have been nil") + } + expectEquals(t, "", sid.Project) + expectEquals(t, "instance456", sid.Instance) +} + +func TestImportSpannerInstanceId_projectAndInstance(t *testing.T) { + sid, e := importSpannerInstanceId("project123/instance456") + if e != nil { + t.Errorf("Error should have been nil") + } + expectEquals(t, "project123", sid.Project) + expectEquals(t, "instance456", sid.Instance) +} + +func TestImportSpannerInstanceId_invalidLeadingSlash(t *testing.T) { + sid, e := importSpannerInstanceId("/instance456") + expectInvalidSpannerInstanceImport(t, sid, e) +} + +func TestImportSpannerInstanceId_invalidTrailingSlash(t *testing.T) { + sid, e := importSpannerInstanceId("project123/") + expectInvalidSpannerInstanceImport(t, sid, e) +} + +func TestImportSpannerInstanceId_invalidSingleSlash(t *testing.T) { + sid, e := importSpannerInstanceId("/") + expectInvalidSpannerInstanceImport(t, sid, e) +} + +func TestImportSpannerInstanceId_invalidMultiSlash(t *testing.T) { + sid, e := importSpannerInstanceId("project123/instance456/db789") + expectInvalidSpannerInstanceImport(t, sid, e) +} + +func TestImportSpannerInstanceId_projectId(t *testing.T) { + shouldPass := []string{ + "project-id/instance", + "123123/instance", + 
"hashicorptest.net:project-123/instance", + "123/456", + } + + shouldFail := []string{ + "project-id#/instance", + "project-id/instance#", + "hashicorptest.net:project-123:invalid:project/instance", + "hashicorptest.net:/instance", + } + + for _, element := range shouldPass { + _, e := importSpannerInstanceId(element) + if e != nil { + t.Error("importSpannerInstanceId should pass on '" + element + "' but doesn't") + } + } + + for _, element := range shouldFail { + _, e := importSpannerInstanceId(element) + if e == nil { + t.Error("importSpannerInstanceId should fail on '" + element + "' but doesn't") + } + } +} + +func expectInvalidSpannerInstanceImport(t *testing.T, sid *spannerInstanceId, e error) { + if sid != nil { + t.Errorf("Expected spannerInstanceId to be nil") + return + } + if e == nil { + t.Errorf("Expected an Error but did not get one") + return + } + if !strings.HasPrefix(e.Error(), "Invalid spanner instance specifier") { + t.Errorf("Expecting Error starting with 'Invalid spanner instance specifier'") + } +} + +func expectEquals(t *testing.T, expected, actual string) { + if actual != expected { + t.Fatalf("Expected %s, but got %s", expected, actual) + } +} + +// Acceptance Tests + +func TestAccSpannerInstance_basic(t *testing.T) { + t.Parallel() + + idName := fmt.Sprintf("spanner-test-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckSpannerInstanceDestroy, + Steps: []resource.TestStep{ + { + Config: testAccSpannerInstance_basic(idName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_instance.basic", "state"), + ), + }, + { + ResourceName: "google_spanner_instance.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccSpannerInstance_basicWithAutogenName(t *testing.T) { + t.Parallel() + + displayName := fmt.Sprintf("spanner-test-%s-dname", 
acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckSpannerInstanceDestroy, + Steps: []resource.TestStep{ + { + Config: testAccSpannerInstance_basicWithAutogenName(displayName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_instance.basic", "name"), + ), + }, + { + ResourceName: "google_spanner_instance.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccSpannerInstance_update(t *testing.T) { + t.Parallel() + + dName1 := fmt.Sprintf("spanner-dname1-%s", acctest.RandString(10)) + dName2 := fmt.Sprintf("spanner-dname2-%s", acctest.RandString(10)) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckSpannerInstanceDestroy, + Steps: []resource.TestStep{ + { + Config: testAccSpannerInstance_update(dName1, 1, false), + }, + { + ResourceName: "google_spanner_instance.updater", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccSpannerInstance_update(dName2, 2, true), + }, + { + ResourceName: "google_spanner_instance.updater", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckSpannerInstanceDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_spanner_instance" { + continue + } + + if rs.Primary.ID == "" { + return fmt.Errorf("Unable to verify delete of spanner instance, ID is empty") + } + + instanceName := rs.Primary.Attributes["name"] + project, err := getTestProject(rs.Primary, config) + if err != nil { + return err + } + + id := spannerInstanceId{ + Project: project, + Instance: instanceName, + } + _, err = config.clientSpanner.Projects.Instances.Get( + id.instanceUri()).Do() + + if err == nil { + return fmt.Errorf("Spanner instance 
still exists") + } + + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == http.StatusNotFound { + return nil + } + return errwrap.Wrapf("Error verifying spanner instance deleted: {{err}}", err) + } + + return nil +} + +func testAccSpannerInstance_basic(name string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-dname" + num_nodes = 1 +} +`, name, name) +} + +func testAccSpannerInstance_basicWithAutogenName(name string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + config = "regional-us-central1" + display_name = "%s" + num_nodes = 1 +} +`, name) +} + +func testAccSpannerInstance_update(name string, nodes int, addLabel bool) string { + extraLabel := "" + if addLabel { + extraLabel = "\"key2\" = \"value2\"" + } + return fmt.Sprintf(` +resource "google_spanner_instance" "updater" { + config = "regional-us-central1" + display_name = "%s" + num_nodes = %d + + labels { + "key1" = "value1" + %s + } +} +`, name, nodes, extraLabel) +} diff --git a/provider/terraform/tests/resource_sql_database_instance_test.go b/provider/terraform/tests/resource_sql_database_instance_test.go new file mode 100644 index 000000000000..2de6139060f7 --- /dev/null +++ b/provider/terraform/tests/resource_sql_database_instance_test.go @@ -0,0 +1,1226 @@ +package google + +/** + * Note! You must run these tests once at a time. Google Cloud SQL does + * not allow you to reuse a database for a short time after you reserved it, + * and for this reason the tests will fail if the same config is used several + * times in short succession.
+ */ + +import ( + "fmt" + "log" + "strconv" + "strings" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + + "google.golang.org/api/sqladmin/v1beta4" +) + +// Fields that should be ignored in import tests because they aren't returned +// from GCP (and thus can't be imported) +var ignoredReplicaConfigurationFields = []string{ + "replica_configuration.0.ca_certificate", + "replica_configuration.0.client_certificate", + "replica_configuration.0.client_key", + "replica_configuration.0.connect_retry_interval", + "replica_configuration.0.dump_file_path", + "replica_configuration.0.master_heartbeat_period", + "replica_configuration.0.password", + "replica_configuration.0.ssl_cipher", + "replica_configuration.0.username", + "replica_configuration.0.verify_server_certificate", +} + +func init() { + resource.AddTestSweepers("gcp_sql_db_instance", &resource.Sweeper{ + Name: "gcp_sql_db_instance", + F: testSweepDatabases, + }) +} + +func testSweepDatabases(region string) error { + config, err := sharedConfigForRegion(region) + if err != nil { + return fmt.Errorf("error getting shared config for region: %s", err) + } + + err = config.loadAndValidate() + if err != nil { + log.Fatalf("error loading: %s", err) + } + + found, err := config.clientSqlAdmin.Instances.List(config.Project).Do() + if err != nil { + log.Fatalf("error listing databases: %s", err) + } + + if len(found.Items) == 0 { + log.Printf("No databases found") + return nil + } + + running := map[string]struct{}{} + + for _, d := range found.Items { + var testDbInstance bool + for _, testName := range []string{"tf-lw-", "sqldatabasetest"} { + // only destroy instances we know to fit our test naming pattern + if strings.HasPrefix(d.Name, testName) { + testDbInstance = true + } + } + + if !testDbInstance { + continue + } + if d.State != "RUNNABLE" { + continue + } + running[d.Name] = struct{}{} + } + + for _, d 
:= range found.Items { + // don't delete replicas, we'll take care of that + // when deleting the database they replicate + if d.ReplicaConfiguration != nil { + continue + } + log.Printf("Destroying SQL Instance (%s)", d.Name) + + // replicas need to be stopped and destroyed before destroying a master + // instance. The ordering slice tracks replica databases for a given master + // and we call destroy on them before destroying the master + var ordering []string + for _, replicaName := range d.ReplicaNames { + // don't try to stop replicas that aren't running + if _, ok := running[replicaName]; !ok { + ordering = append(ordering, replicaName) + continue + } + + // need to stop replication before being able to destroy a database + op, err := config.clientSqlAdmin.Instances.StopReplica(config.Project, replicaName).Do() + + if err != nil { + return fmt.Errorf("error, failed to stop replica instance (%s) for instance (%s): %s", replicaName, d.Name, err) + } + + err = sqladminOperationWait(config, op, config.Project, "Stop Replica") + if err != nil { + if strings.Contains(err.Error(), "does not exist") { + log.Printf("Replication operation not found") + } else { + return err + } + } + + ordering = append(ordering, replicaName) + } + + // ordering has a list of replicas (or none), now add the primary to the end + ordering = append(ordering, d.Name) + + for _, db := range ordering { + // destroy instances, replicas first + op, err := config.clientSqlAdmin.Instances.Delete(config.Project, db).Do() + + if err != nil { + if strings.Contains(err.Error(), "409") { + // the GCP api can return a 409 error after the delete operation + // reaches a successful end + log.Printf("Operation not found, got 409 response") + continue + } + + return fmt.Errorf("Error, failed to delete instance %s: %s", db, err) + } + + err = sqladminOperationWait(config, op, config.Project, "Delete Instance") + if err != nil { + if strings.Contains(err.Error(), "does not exist") { + log.Printf("SQL 
instance not found") + continue + } + return err + } + } + } + + return nil +} + +func TestAccSqlDatabaseInstance_basicFirstGen(t *testing.T) { + t.Parallel() + + var instance sqladmin.DatabaseInstance + instanceID := acctest.RandInt() + instanceName := fmt.Sprintf("tf-lw-%d", instanceID) + resourceName := "google_sql_database_instance.instance" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_basic, instanceID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists(resourceName, &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals(resourceName, &instance), + ), + }, + resource.TestStep{ + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + ResourceName: resourceName, + ImportStateId: fmt.Sprintf("projects/%s/instances/%s", getTestProjectFromEnv(), instanceName), + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + ResourceName: resourceName, + ImportStateId: fmt.Sprintf("%s/%s", getTestProjectFromEnv(), instanceName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_basicInferredName(t *testing.T) { + t.Parallel() + + var instance sqladmin.DatabaseInstance + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleSqlDatabaseInstance_basic2, + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + ), + }, + }, + }) +} + +func 
TestAccSqlDatabaseInstance_basicSecondGen(t *testing.T) { + t.Parallel() + + var instance sqladmin.DatabaseInstance + databaseID := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_basic3, databaseID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseRootUserDoesNotExist( + &instance), + ), + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_dontDeleteDefaultUserOnReplica(t *testing.T) { + t.Parallel() + + var instance sqladmin.DatabaseInstance + databaseName := "sql-instance-test-" + acctest.RandString(10) + failoverName := "sql-instance-test-failover-" + acctest.RandString(10) + // 1. Create an instance. + // 2. Add a root@'%' user. + // 3. 
Create a replica and assert it succeeds (it'll fail if we try to delete the root user thinking it's a + // default user) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleSqlDatabaseInstanceConfig_withoutReplica(databaseName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + ), + }, resource.TestStep{ + PreConfig: func() { + // Add a root user + config := testAccProvider.Meta().(*Config) + user := sqladmin.User{ + Name: "root", + Host: "%", + Password: acctest.RandString(26), + } + op, err := config.clientSqlAdmin.Users.Insert(config.Project, databaseName, &user).Do() + if err != nil { + t.Errorf("Error while inserting root@%% user: %s", err) + return + } + err = sqladminOperationWait(config, op, config.Project, "Waiting for user to insert") + if err != nil { + t.Errorf("Error while waiting for user insert operation to complete: %s", err.Error()) + } + // User was created, now create replica + }, + Config: testGoogleSqlDatabaseInstanceConfig_withReplica(databaseName, failoverName), + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_settings_basic(t *testing.T) { + t.Parallel() + + var instance sqladmin.DatabaseInstance + databaseID := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_settings, databaseID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + 
testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + ), + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_replica(t *testing.T) { + t.Parallel() + + databaseID := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_replica, databaseID, databaseID, databaseID), + }, + resource.TestStep{ + ResourceName: "google_sql_database_instance.instance_master", + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + ResourceName: "google_sql_database_instance.replica1", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: ignoredReplicaConfigurationFields, + }, + resource.TestStep{ + ResourceName: "google_sql_database_instance.replica2", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: ignoredReplicaConfigurationFields, + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_slave(t *testing.T) { + t.Parallel() + + var instance sqladmin.DatabaseInstance + masterID := acctest.RandInt() + slaveID := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_slave, masterID, slaveID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance_master", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance_master", &instance), + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance_slave", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + 
"google_sql_database_instance.instance_slave", &instance), + ), + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_highAvailability(t *testing.T) { + t.Parallel() + + var instance sqladmin.DatabaseInstance + instanceID := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_highAvailability, instanceID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + // Check that we've set our high availability type correctly, and it's been + // accepted by the API + func(s *terraform.State) error { + if instance.Settings.AvailabilityType != "REGIONAL" { + return fmt.Errorf("Database %s was not configured with Regional HA", instance.Name) + } + + return nil + }, + ), + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_diskspecs(t *testing.T) { + t.Parallel() + + var instance sqladmin.DatabaseInstance + masterID := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_diskspecs, masterID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + ), + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_maintenance(t *testing.T) { + t.Parallel() + + var instance sqladmin.DatabaseInstance + masterID := acctest.RandInt() + + resource.Test(t, 
resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_maintenance, masterID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + ), + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_settings_upgrade(t *testing.T) { + t.Parallel() + + var instance sqladmin.DatabaseInstance + databaseID := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_basic, databaseID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + ), + }, + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_settings, databaseID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + ), + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_settingsDowngrade(t *testing.T) { + t.Parallel() + + var instance sqladmin.DatabaseInstance + databaseID := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: 
fmt.Sprintf( + testGoogleSqlDatabaseInstance_settings, databaseID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + ), + }, + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_basic, databaseID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + ), + }, + }, + }) +} + +// GH-4222 +func TestAccSqlDatabaseInstance_authNets(t *testing.T) { + t.Parallel( + // var instance sqladmin.DatabaseInstance + ) + + databaseID := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_authNets_step1, databaseID), + }, + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_authNets_step2, databaseID), + }, + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_authNets_step1, databaseID), + }, + }, + }) +} + +// Tests that a SQL instance can be referenced from more than one other resource without +// throwing an error during provisioning, see #9018. 
+func TestAccSqlDatabaseInstance_multipleOperations(t *testing.T) { + t.Parallel() + + databaseID, instanceID, userID := acctest.RandString(8), acctest.RandString(8), acctest.RandString(8) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_multipleOperations, databaseID, instanceID, userID), + }, + }, + }) +} + +func TestAccSqlDatabaseInstance_basic_with_user_labels(t *testing.T) { + t.Parallel() + + var instance sqladmin.DatabaseInstance + databaseID := acctest.RandInt() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseInstanceDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_basic_with_user_labels, databaseID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseRootUserDoesNotExist( + &instance), + ), + }, + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabaseInstance_basic_with_user_labels_update, databaseID), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseInstanceExists( + "google_sql_database_instance.instance", &instance), + testAccCheckGoogleSqlDatabaseInstanceEquals( + "google_sql_database_instance.instance", &instance), + ), + }, + }, + }) +} + +func testAccCheckGoogleSqlDatabaseInstanceEquals(n string, + instance *sqladmin.DatabaseInstance) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + attributes := 
rs.Primary.Attributes + + server := instance.Name + local := attributes["name"] + if server != local { + return fmt.Errorf("Error name mismatch, (%s, %s)", server, local) + } + + server = instance.Settings.Tier + local = attributes["settings.0.tier"] + if server != local { + return fmt.Errorf("Error settings.tier mismatch, (%s, %s)", server, local) + } + + server = strings.TrimPrefix(instance.MasterInstanceName, instance.Project+":") + local = attributes["master_instance_name"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error master_instance_name mismatch, (%s, %s)", server, local) + } + + ip_len, err := strconv.Atoi(attributes["ip_address.#"]) + if err != nil { + return fmt.Errorf("Error parsing ip_addresses.# : %s", err.Error()) + } + if ip_len != len(instance.IpAddresses) { + return fmt.Errorf("Error ip_addresses.# mismatch, server has %d but local has %d", len(instance.IpAddresses), ip_len) + } + // For now, assume the order matches + for idx, ip := range instance.IpAddresses { + server = attributes["ip_address."+strconv.Itoa(idx)+".ip_address"] + local = ip.IpAddress + if server != local { + return fmt.Errorf("Error ip_addresses.%d.ip_address mismatch, server has %s but local has %s", idx, server, local) + } + + server = attributes["ip_address."+strconv.Itoa(idx)+".time_to_retire"] + local = ip.TimeToRetire + if server != local { + return fmt.Errorf("Error ip_addresses.%d.time_to_retire mismatch, server has %s but local has %s", idx, server, local) + } + } + + if len(instance.IpAddresses) > 0 { + server = instance.IpAddresses[0].IpAddress + local = attributes["first_ip_address"] + if server != local { + return fmt.Errorf("Error first_ip_address mismatch, server has %s but local has %s", server, local) + } + } + + server = instance.Settings.ActivationPolicy + local = attributes["settings.0.activation_policy"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.activation_policy 
mismatch, (%s, %s)", server, local) + } + + server = instance.Settings.AvailabilityType + local = attributes["settings.0.availability_type"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.availability_type mismatch, (%s, %s)", server, local) + } + + if instance.Settings.BackupConfiguration != nil { + server = strconv.FormatBool(instance.Settings.BackupConfiguration.BinaryLogEnabled) + local = attributes["settings.0.backup_configuration.0.binary_log_enabled"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.backup_configuration.binary_log_enabled mismatch, (%s, %s)", server, local) + } + + server = strconv.FormatBool(instance.Settings.BackupConfiguration.Enabled) + local = attributes["settings.0.backup_configuration.0.enabled"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.backup_configuration.enabled mismatch, (%s, %s)", server, local) + } + + server = instance.Settings.BackupConfiguration.StartTime + local = attributes["settings.0.backup_configuration.0.start_time"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.backup_configuration.start_time mismatch, (%s, %s)", server, local) + } + } + + server = strconv.FormatBool(instance.Settings.CrashSafeReplicationEnabled) + local = attributes["settings.0.crash_safe_replication"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.crash_safe_replication mismatch, (%s, %s)", server, local) + } + + // First generation CloudSQL instances will not have any value for StorageAutoResize. + // We need to check if this value has been omitted before we potentially deference a + // nil pointer. 
+ if instance.Settings.StorageAutoResize != nil { + server = strconv.FormatBool(*instance.Settings.StorageAutoResize) + local = attributes["settings.0.disk_autoresize"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.disk_autoresize mismatch, (%s, %s)", server, local) + } + } + + server = strconv.FormatInt(instance.Settings.DataDiskSizeGb, 10) + local = attributes["settings.0.disk_size"] + if server != local && len(server) > 0 && len(local) > 0 && local != "0" { + return fmt.Errorf("Error settings.disk_size mismatch, (%s, %s)", server, local) + } + + server = instance.Settings.DataDiskType + local = attributes["settings.0.disk_type"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.disk_type mismatch, (%s, %s)", server, local) + } + + if instance.Settings.IpConfiguration != nil { + server = strconv.FormatBool(instance.Settings.IpConfiguration.Ipv4Enabled) + local = attributes["settings.0.ip_configuration.0.ipv4_enabled"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.ip_configuration.ipv4_enabled mismatch, (%s, %s)", server, local) + } + + server = strconv.FormatBool(instance.Settings.IpConfiguration.RequireSsl) + local = attributes["settings.0.ip_configuration.0.require_ssl"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.ip_configuration.require_ssl mismatch, (%s, %s)", server, local) + } + } + + if instance.Settings.LocationPreference != nil { + server = instance.Settings.LocationPreference.FollowGaeApplication + local = attributes["settings.0.location_preference.0.follow_gae_application"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.location_preference.follow_gae_application mismatch, (%s, %s)", server, local) + } + + server = instance.Settings.LocationPreference.Zone + local = 
attributes["settings.0.location_preference.0.zone"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.location_preference.zone mismatch, (%s, %s)", server, local) + } + } + + if instance.Settings.MaintenanceWindow != nil { + server = strconv.FormatInt(instance.Settings.MaintenanceWindow.Day, 10) + local = attributes["settings.0.maintenance_window.0.day"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.maintenance_window.day mismatch, (%s, %s)", server, local) + } + + server = strconv.FormatInt(instance.Settings.MaintenanceWindow.Hour, 10) + local = attributes["settings.0.maintenance_window.0.hour"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.maintenance_window.hour mismatch, (%s, %s)", server, local) + } + + server = instance.Settings.MaintenanceWindow.UpdateTrack + local = attributes["settings.0.maintenance_window.0.update_track"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.maintenance_window.update_track mismatch, (%s, %s)", server, local) + } + } + + server = instance.Settings.PricingPlan + local = attributes["settings.0.pricing_plan"] + if server != local && len(server) > 0 && len(local) > 0 { + return fmt.Errorf("Error settings.pricing_plan mismatch, (%s, %s)", server, local) + } + + if instance.Settings.UserLabels != nil { + server := instance.Settings.UserLabels["location"] + local = attributes["settings.0.user_labels.location"] + + if server != local { + return fmt.Errorf("Error settings.user_labels.location mismatch, (%s, %s)", server, local) + } + + server = instance.Settings.UserLabels["track"] + local = attributes["settings.0.user_labels.track"] + + if server != local { + return fmt.Errorf("Error settings.user_labels.track mismatch, (%s, %s)", server, local) + } + } + + if instance.ReplicaConfiguration != nil { + server = 
strconv.FormatBool(instance.ReplicaConfiguration.FailoverTarget)
+			local = attributes["replica_configuration.0.failover_target"]
+			if server != local && len(server) > 0 && len(local) > 0 {
+				return fmt.Errorf("Error replica_configuration.failover_target mismatch, (%s, %s)", server, local)
+			}
+		}
+
+		server = instance.ConnectionName
+		local = attributes["connection_name"]
+		if server != local {
+			return fmt.Errorf("Error connection_name mismatch. (%s, %s)", server, local)
+		}
+
+		return nil
+	}
+}
+
+// testAccCheckGoogleSqlDatabaseInstanceExists fetches the instance named in
+// state entry n from the SQL Admin API and copies it into *instance for later
+// checks.
+func testAccCheckGoogleSqlDatabaseInstanceExists(n string,
+	instance *sqladmin.DatabaseInstance) resource.TestCheckFunc {
+	return func(s *terraform.State) error {
+		config := testAccProvider.Meta().(*Config)
+		rs, ok := s.RootModule().Resources[n]
+		if !ok {
+			return fmt.Errorf("Not found: %s", n)
+		}
+
+		found, err := config.clientSqlAdmin.Instances.Get(config.Project,
+			rs.Primary.Attributes["name"]).Do()
+
+		// Check the error before dereferencing: "found" is nil when the
+		// lookup fails, so dereferencing first (as this previously did)
+		// panicked instead of reporting a useful test failure.
+		if err != nil {
+			return fmt.Errorf("Not found: %s", n)
+		}
+
+		*instance = *found
+
+		return nil
+	}
+}
+
+// testAccSqlDatabaseInstanceDestroy verifies every SQL instance in state has
+// actually been deleted (the API Get must fail for each one).
+func testAccSqlDatabaseInstanceDestroy(s *terraform.State) error {
+	// The provider Meta is loop-invariant; fetch it once.
+	config := testAccProvider.Meta().(*Config)
+	for _, rs := range s.RootModule().Resources {
+		if rs.Type != "google_sql_database_instance" {
+			continue
+		}
+
+		_, err := config.clientSqlAdmin.Instances.Get(config.Project,
+			rs.Primary.Attributes["name"]).Do()
+		if err == nil {
+			return fmt.Errorf("Database Instance still exists")
+		}
+	}
+
+	return nil
+}
+
+// testAccCheckGoogleSqlDatabaseRootUserDoesNotExist asserts that the default
+// root@% user was removed from the given instance.
+func testAccCheckGoogleSqlDatabaseRootUserDoesNotExist(
+	instance *sqladmin.DatabaseInstance) resource.TestCheckFunc {
+	return func(s *terraform.State) error {
+		config := testAccProvider.Meta().(*Config)
+
+		users, err := config.clientSqlAdmin.Users.List(config.Project, instance.Name).Do()
+
+		if err != nil {
+			return fmt.Errorf("Could not list database users for %q: %s", instance.Name, err)
+		}
+
+		for _, u := range users.Items {
+			if u.Name == "root" && u.Host == "%" {
+				return fmt.Errorf("%v@%v user still exists", u.Name, 
u.Host) + } + } + + return nil + } +} + +var testGoogleSqlDatabaseInstance_basic = ` +resource "google_sql_database_instance" "instance" { + name = "tf-lw-%d" + region = "us-central" + settings { + tier = "D0" + crash_safe_replication = false + } +} +` + +var testGoogleSqlDatabaseInstance_basic2 = ` +resource "google_sql_database_instance" "instance" { + region = "us-central" + settings { + tier = "D0" + crash_safe_replication = false + } +} +` +var testGoogleSqlDatabaseInstance_basic3 = ` +resource "google_sql_database_instance" "instance" { + name = "tf-lw-%d" + region = "us-central1" + settings { + tier = "db-f1-micro" + } +} +` + +func testGoogleSqlDatabaseInstanceConfig_withoutReplica(instanceName string) string { + return fmt.Sprintf(`resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central1" + database_version = "MYSQL_5_7" + + settings { + tier = "db-n1-standard-1" + + backup_configuration { + binary_log_enabled = "true" + enabled = "true" + start_time = "18:00" + } + } +}`, instanceName) +} + +func testGoogleSqlDatabaseInstanceConfig_withReplica(instanceName, failoverName string) string { + return fmt.Sprintf(` +resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central1" + database_version = "MYSQL_5_7" + + settings { + tier = "db-n1-standard-1" + + backup_configuration { + binary_log_enabled = "true" + enabled = "true" + start_time = "18:00" + } + } +} + +resource "google_sql_database_instance" "instance-failover" { + name = "%s" + region = "us-central1" + database_version = "MYSQL_5_7" + master_instance_name = "${google_sql_database_instance.instance.name}" + + replica_configuration { + failover_target = "true" + } + + settings { + tier = "db-n1-standard-1" + } +} +`, instanceName, failoverName) +} + +var testGoogleSqlDatabaseInstance_settings = ` +resource "google_sql_database_instance" "instance" { + name = "tf-lw-%d" + region = "us-central" + settings { + tier = "D0" + 
crash_safe_replication = false + replication_type = "ASYNCHRONOUS" + location_preference { + zone = "us-central1-f" + } + + ip_configuration { + ipv4_enabled = "true" + authorized_networks { + value = "108.12.12.12" + name = "misc" + expiration_time = "2050-11-15T16:19:00.094Z" + } + } + + backup_configuration { + enabled = "true" + start_time = "19:19" + } + + activation_policy = "ON_DEMAND" + } +} +` + +var testGoogleSqlDatabaseInstance_replica = ` +resource "google_sql_database_instance" "instance_master" { + name = "tf-lw-%d" + database_version = "MYSQL_5_6" + region = "us-central1" + + settings { + tier = "db-n1-standard-1" + + backup_configuration { + enabled = true + start_time = "00:00" + binary_log_enabled = true + } + } +} + +resource "google_sql_database_instance" "replica1" { + name = "tf-lw-%d-1" + database_version = "MYSQL_5_6" + region = "us-central1" + + settings { + tier = "db-n1-standard-1" + } + + master_instance_name = "${google_sql_database_instance.instance_master.name}" + + replica_configuration { + connect_retry_interval = 100 + master_heartbeat_period = 10000 + password = "password" + username = "username" + ssl_cipher = "ALL" + verify_server_certificate = false + } +} + +resource "google_sql_database_instance" "replica2" { + name = "tf-lw-%d-2" + database_version = "MYSQL_5_6" + region = "us-central1" + + settings { + tier = "db-n1-standard-1" + } + + master_instance_name = "${google_sql_database_instance.instance_master.name}" + + replica_configuration { + connect_retry_interval = 100 + master_heartbeat_period = 10000 + password = "password" + username = "username" + ssl_cipher = "ALL" + verify_server_certificate = false + } +} +` + +var testGoogleSqlDatabaseInstance_slave = ` +resource "google_sql_database_instance" "instance_master" { + name = "tf-lw-%d" + region = "us-central1" + + settings { + tier = "db-f1-micro" + + backup_configuration { + enabled = true + binary_log_enabled = true + } + } +} + +resource 
"google_sql_database_instance" "instance_slave" { + name = "tf-lw-%d" + region = "us-central1" + + master_instance_name = "${google_sql_database_instance.instance_master.name}" + + settings { + tier = "db-f1-micro" + } +} +` + +var testGoogleSqlDatabaseInstance_highAvailability = ` +resource "google_sql_database_instance" "instance" { + name = "tf-lw-%d" + region = "us-central1" + database_version = "POSTGRES_9_6" + + settings { + tier = "db-f1-micro" + + availability_type = "REGIONAL" + + backup_configuration { + enabled = true + } + } +} +` + +var testGoogleSqlDatabaseInstance_diskspecs = ` +resource "google_sql_database_instance" "instance" { + name = "tf-lw-%d" + region = "us-central1" + + settings { + tier = "db-f1-micro" + disk_autoresize = true + disk_size = 15 + disk_type = "PD_HDD" + } +} +` + +var testGoogleSqlDatabaseInstance_maintenance = ` +resource "google_sql_database_instance" "instance" { + name = "tf-lw-%d" + region = "us-central1" + + settings { + tier = "db-f1-micro" + + maintenance_window { + day = 7 + hour = 3 + update_track = "canary" + } + } +} +` + +var testGoogleSqlDatabaseInstance_authNets_step1 = ` +resource "google_sql_database_instance" "instance" { + name = "tf-lw-%d" + region = "us-central" + settings { + tier = "D0" + crash_safe_replication = false + + ip_configuration { + ipv4_enabled = "true" + authorized_networks { + value = "108.12.12.12" + name = "misc" + expiration_time = "2050-11-15T16:19:00.094Z" + } + } + } +} +` + +var testGoogleSqlDatabaseInstance_authNets_step2 = ` +resource "google_sql_database_instance" "instance" { + name = "tf-lw-%d" + region = "us-central" + settings { + tier = "D0" + crash_safe_replication = false + + ip_configuration { + ipv4_enabled = "true" + } + } +} +` + +var testGoogleSqlDatabaseInstance_multipleOperations = ` +resource "google_sql_database_instance" "instance" { + name = "tf-test-%s" + region = "us-central" + settings { + tier = "D0" + crash_safe_replication = false + } +} + +resource 
"google_sql_database" "database" { + name = "tf-test-%s" + instance = "${google_sql_database_instance.instance.name}" +} + +resource "google_sql_user" "user" { + name = "tf-test-%s" + instance = "${google_sql_database_instance.instance.name}" + host = "google.com" + password = "hunter2" +} +` + +var testGoogleSqlDatabaseInstance_basic_with_user_labels = ` +resource "google_sql_database_instance" "instance" { + name = "tf-lw-%d" + region = "us-central1" + settings { + tier = "db-f1-micro" + user_labels { + track = "production" + location = "western-division" + } + } +} +` +var testGoogleSqlDatabaseInstance_basic_with_user_labels_update = ` +resource "google_sql_database_instance" "instance" { + name = "tf-lw-%d" + region = "us-central1" + settings { + tier = "db-f1-micro" + user_labels { + track = "production" + } + } +} +` diff --git a/provider/terraform/tests/resource_sql_database_test.go b/provider/terraform/tests/resource_sql_database_test.go new file mode 100644 index 000000000000..6e5f9c6cd93a --- /dev/null +++ b/provider/terraform/tests/resource_sql_database_test.go @@ -0,0 +1,212 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + + "google.golang.org/api/sqladmin/v1beta4" +) + +func TestAccSqlDatabase_basic(t *testing.T) { + t.Parallel() + + var database sqladmin.Database + + resourceName := "google_sql_database.database" + instanceName := acctest.RandomWithPrefix("sqldatabasetest") + dbName := acctest.RandomWithPrefix("sqldatabasetest") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf(testGoogleSqlDatabase_basic, instanceName, dbName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseExists(resourceName, 
&database), + testAccCheckGoogleSqlDatabaseEquals(resourceName, &database), + ), + }, + resource.TestStep{ + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + ResourceName: resourceName, + ImportStateId: fmt.Sprintf("%s/%s", instanceName, dbName), + ImportState: true, + ImportStateVerify: true, + }, + + resource.TestStep{ + ResourceName: resourceName, + ImportStateId: fmt.Sprintf("instances/%s/databases/%s", instanceName, dbName), + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + ResourceName: resourceName, + ImportStateId: fmt.Sprintf("%s/%s/%s", getTestProjectFromEnv(), instanceName, dbName), + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + ResourceName: resourceName, + ImportStateId: fmt.Sprintf("projects/%s/instances/%s/databases/%s", getTestProjectFromEnv(), instanceName, dbName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccSqlDatabase_update(t *testing.T) { + t.Parallel() + + var database sqladmin.Database + + instance_name := acctest.RandomWithPrefix("sqldatabasetest") + database_name := acctest.RandomWithPrefix("sqldatabasetest") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlDatabaseDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabase_basic, instance_name, database_name), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseExists( + "google_sql_database.database", &database), + testAccCheckGoogleSqlDatabaseEquals( + "google_sql_database.database", &database), + ), + }, + resource.TestStep{ + Config: fmt.Sprintf( + testGoogleSqlDatabase_latin1, instance_name, database_name), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlDatabaseExists( + "google_sql_database.database", &database), + testAccCheckGoogleSqlDatabaseEquals( + 
"google_sql_database.database", &database), + ), + }, + }, + }) +} + +func testAccCheckGoogleSqlDatabaseEquals(n string, + database *sqladmin.Database) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Resource not found: %s", n) + } + + database_name := rs.Primary.Attributes["name"] + instance_name := rs.Primary.Attributes["instance"] + charset := rs.Primary.Attributes["charset"] + collation := rs.Primary.Attributes["collation"] + + if database_name != database.Name { + return fmt.Errorf("Error name mismatch, (%s, %s)", database_name, database.Name) + } + + if instance_name != database.Instance { + return fmt.Errorf("Error instance_name mismatch, (%s, %s)", instance_name, database.Instance) + } + + if charset != database.Charset { + return fmt.Errorf("Error charset mismatch, (%s, %s)", charset, database.Charset) + } + + if collation != database.Collation { + return fmt.Errorf("Error collation mismatch, (%s, %s)", collation, database.Collation) + } + + return nil + } +} + +func testAccCheckGoogleSqlDatabaseExists(n string, + database *sqladmin.Database) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Resource not found: %s", n) + } + + database_name := rs.Primary.Attributes["name"] + instance_name := rs.Primary.Attributes["instance"] + found, err := config.clientSqlAdmin.Databases.Get(config.Project, + instance_name, database_name).Do() + + if err != nil { + return fmt.Errorf("Not found: %s: %s", n, err) + } + + *database = *found + + return nil + } +} + +func testAccSqlDatabaseDestroy(s *terraform.State) error { + for _, rs := range s.RootModule().Resources { + config := testAccProvider.Meta().(*Config) + if rs.Type != "google_sql_database" { + continue + } + + database_name := rs.Primary.Attributes["name"] + instance_name := 
rs.Primary.Attributes["instance"] + _, err := config.clientSqlAdmin.Databases.Get(config.Project, + instance_name, database_name).Do() + + if err == nil { + return fmt.Errorf("Database resource still exists") + } + } + + return nil +} + +var testGoogleSqlDatabase_basic = ` +resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central" + settings { + tier = "D0" + } +} + +resource "google_sql_database" "database" { + name = "%s" + instance = "${google_sql_database_instance.instance.name}" +} +` +var testGoogleSqlDatabase_latin1 = ` +resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central" + settings { + tier = "D0" + } +} + +resource "google_sql_database" "database" { + name = "%s" + instance = "${google_sql_database_instance.instance.name}" + charset = "latin1" + collation = "latin1_swedish_ci" +} +` diff --git a/provider/terraform/tests/resource_sql_user_migrate_test.go b/provider/terraform/tests/resource_sql_user_migrate_test.go new file mode 100644 index 000000000000..5e03d8d75f84 --- /dev/null +++ b/provider/terraform/tests/resource_sql_user_migrate_test.go @@ -0,0 +1,81 @@ +package google + +import ( + "testing" + + "github.com/hashicorp/terraform/terraform" +) + +func TestSqlUserMigrateState(t *testing.T) { + cases := map[string]struct { + StateVersion int + Attributes map[string]string + Expected map[string]string + Meta interface{} + ID string + ExpectedID string + }{ + "change id from $NAME to $INSTANCENAME.$NAME": { + StateVersion: 0, + Attributes: map[string]string{ + "name": "tf-user", + "instance": "tf-instance", + }, + Expected: map[string]string{ + "name": "tf-user", + "instance": "tf-instance", + }, + Meta: &Config{}, + ID: "tf-user", + ExpectedID: "tf-instance/tf-user", + }, + } + + for tn, tc := range cases { + is := &terraform.InstanceState{ + ID: tc.ID, + Attributes: tc.Attributes, + } + is, err := resourceSqlUserMigrateState( + tc.StateVersion, is, tc.Meta) + + if err != nil { + 
t.Fatalf("bad: %s, err: %#v", tn, err) + } + + if is.ID != tc.ExpectedID { + t.Fatalf("bad ID.\n\n expected: %s\n got: %s", tc.ExpectedID, is.ID) + } + + for k, v := range tc.Expected { + if is.Attributes[k] != v { + t.Fatalf( + "bad: %s\n\n expected: %#v -> %#v\n got: %#v -> %#v\n in: %#v", + tn, k, v, k, is.Attributes[k], is.Attributes) + } + } + } +} + +func TestSqlUserMigrateState_empty(t *testing.T) { + var is *terraform.InstanceState + var meta *Config + + // should handle nil + is, err := resourceSqlUserMigrateState(0, is, meta) + + if err != nil { + t.Fatalf("err: %#v", err) + } + if is != nil { + t.Fatalf("expected nil instancestate, got: %#v", is) + } + + // should handle non-nil but empty + is = &terraform.InstanceState{} + is, err = resourceSqlUserMigrateState(0, is, meta) + + if err != nil { + t.Fatalf("err: %#v", err) + } +} diff --git a/provider/terraform/tests/resource_sql_user_test.go b/provider/terraform/tests/resource_sql_user_test.go new file mode 100644 index 000000000000..d7869b5907f6 --- /dev/null +++ b/provider/terraform/tests/resource_sql_user_test.go @@ -0,0 +1,177 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccSqlUser_mysql(t *testing.T) { + t.Parallel() + + instance := acctest.RandomWithPrefix("i") + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlUserDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleSqlUser_mysql(instance, "password"), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlUserExists("google_sql_user.user1"), + testAccCheckGoogleSqlUserExists("google_sql_user.user2"), + ), + }, + resource.TestStep{ + // Update password + Config: testGoogleSqlUser_mysql(instance, "new_password"), + Check: resource.ComposeTestCheckFunc( + 
testAccCheckGoogleSqlUserExists("google_sql_user.user1"), + testAccCheckGoogleSqlUserExists("google_sql_user.user2"), + ), + }, + resource.TestStep{ + ResourceName: "google_sql_user.user2", + ImportStateId: instance + "/gmail.com/admin", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + }, + }) +} + +func TestAccSqlUser_postgres(t *testing.T) { + t.Parallel() + + instance := acctest.RandomWithPrefix("i") + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccSqlUserDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleSqlUser_postgres(instance, "password"), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlUserExists("google_sql_user.user"), + ), + }, + resource.TestStep{ + // Update password + Config: testGoogleSqlUser_postgres(instance, "new_password"), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlUserExists("google_sql_user.user"), + ), + }, + resource.TestStep{ + ResourceName: "google_sql_user.user", + ImportStateId: instance + "/admin", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + }, + }) +} + +func testAccCheckGoogleSqlUserExists(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Resource not found: %s", n) + } + + name := rs.Primary.Attributes["name"] + instance := rs.Primary.Attributes["instance"] + host := rs.Primary.Attributes["host"] + users, err := config.clientSqlAdmin.Users.List(config.Project, + instance).Do() + + if err != nil { + return err + } + + for _, user := range users.Items { + if user.Name == name && user.Host == host { + return nil + } + } + + return fmt.Errorf("Not found: %s: %s", n, err) + } +} + +func testAccSqlUserDestroy(s *terraform.State) error { 
+	// The provider Meta is loop-invariant; fetch it once.
+	config := testAccProvider.Meta().(*Config)
+	for _, rs := range s.RootModule().Resources {
+		// This is the *user* destroy check; it previously filtered on
+		// "google_sql_database" and so never inspected any user resource.
+		if rs.Type != "google_sql_user" {
+			continue
+		}
+
+		name := rs.Primary.Attributes["name"]
+		instance := rs.Primary.Attributes["instance"]
+		host := rs.Primary.Attributes["host"]
+		users, err := config.clientSqlAdmin.Users.List(config.Project,
+			instance).Do()
+		// If listing fails (e.g. the instance itself is already gone), the
+		// user cannot still exist; skip rather than dereference a nil
+		// response, which the previous code did.
+		if err != nil {
+			continue
+		}
+
+		for _, user := range users.Items {
+			if user.Name == name && user.Host == host {
+				return fmt.Errorf("User %s still exists", name)
+			}
+		}
+		// Keep iterating: every user resource in state must be checked,
+		// not just the first one encountered.
+	}
+
+	return nil
+}
+
+// testGoogleSqlUser_mysql renders a first-generation MySQL instance with two
+// users (same name, different hosts); password parameterized for updates.
+func testGoogleSqlUser_mysql(instance, password string) string {
+	return fmt.Sprintf(`
+	resource "google_sql_database_instance" "instance" {
+		name = "%s"
+		region = "us-central"
+		settings {
+			tier = "D0"
+		}
+	}
+
+	resource "google_sql_user" "user1" {
+		name = "admin"
+		instance = "${google_sql_database_instance.instance.name}"
+		host = "google.com"
+		password = "%s"
+	}
+
+	resource "google_sql_user" "user2" {
+		name = "admin"
+		instance = "${google_sql_database_instance.instance.name}"
+		host = "gmail.com"
+		password = "hunter2"
+	}
+	`, instance, password)
+}
+
+// testGoogleSqlUser_postgres renders a Postgres instance with one user (no
+// host attribute, which Postgres users do not have).
+func testGoogleSqlUser_postgres(instance, password string) string {
+	return fmt.Sprintf(`
+	resource "google_sql_database_instance" "instance" {
+		name = "%s"
+		region = "us-central1"
+		database_version = "POSTGRES_9_6"
+
+		settings {
+			tier = "db-f1-micro"
+		}
+	}
+
+	resource "google_sql_user" "user" {
+		name = "admin"
+		instance = "${google_sql_database_instance.instance.name}"
+		password = "%s"
+	}
+	`, instance, password)
+}
diff --git a/provider/terraform/tests/resource_storage_bucket_acl_test.go b/provider/terraform/tests/resource_storage_bucket_acl_test.go
new file mode 100644
index 000000000000..53885ffbad8f
--- /dev/null
+++ b/provider/terraform/tests/resource_storage_bucket_acl_test.go
@@ -0,0 +1,290 @@
+package google
+
+import (
+	"fmt"
+	"os"
+	"testing"
+
+	"github.com/hashicorp/terraform/helper/acctest"
+	
"github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +var ( + roleEntityBasic1 = "OWNER:user-paddy@hashicorp.com" + roleEntityBasic2 = "READER:user-paddy@carvers.co" + roleEntityBasic3_owner = "OWNER:user-paddy@paddy.io" + roleEntityBasic3_reader = "READER:user-foran.paddy@gmail.com" + + roleEntityOwners = "OWNER:project-owners-" + os.Getenv("GOOGLE_PROJECT_NUMBER") + roleEntityEditors = "OWNER:project-editors-" + os.Getenv("GOOGLE_PROJECT_NUMBER") + roleEntityViewers = "READER:project-viewers-" + os.Getenv("GOOGLE_PROJECT_NUMBER") +) + +func testBucketName() string { + return fmt.Sprintf("%s-%d", "tf-test-acl-bucket", acctest.RandInt()) +} + +func TestAccStorageBucketAcl_basic(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + skipIfEnvNotSet(t, "GOOGLE_PROJECT_NUMBER") + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsAclBasic1(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageBucketAcl(bucketName, roleEntityBasic1), + testAccCheckGoogleStorageBucketAcl(bucketName, roleEntityBasic2), + ), + }, + }, + }) +} + +func TestAccStorageBucketAcl_upgrade(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + skipIfEnvNotSet(t, "GOOGLE_PROJECT_NUMBER") + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsAclBasic1(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageBucketAcl(bucketName, roleEntityBasic1), + testAccCheckGoogleStorageBucketAcl(bucketName, roleEntityBasic2), + ), + }, + + resource.TestStep{ + Config: testGoogleStorageBucketsAclBasic2(bucketName), + 
Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageBucketAcl(bucketName, roleEntityBasic2), + testAccCheckGoogleStorageBucketAcl(bucketName, roleEntityBasic3_owner), + ), + }, + + resource.TestStep{ + Config: testGoogleStorageBucketsAclBasicDelete(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageBucketAclDelete(bucketName, roleEntityBasic1), + testAccCheckGoogleStorageBucketAclDelete(bucketName, roleEntityBasic2), + testAccCheckGoogleStorageBucketAclDelete(bucketName, roleEntityBasic3_owner), + ), + }, + }, + }) +} + +func TestAccStorageBucketAcl_downgrade(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + skipIfEnvNotSet(t, "GOOGLE_PROJECT_NUMBER") + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsAclBasic2(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageBucketAcl(bucketName, roleEntityBasic2), + testAccCheckGoogleStorageBucketAcl(bucketName, roleEntityBasic3_owner), + ), + }, + + resource.TestStep{ + Config: testGoogleStorageBucketsAclBasic3(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageBucketAcl(bucketName, roleEntityBasic2), + testAccCheckGoogleStorageBucketAcl(bucketName, roleEntityBasic3_reader), + ), + }, + + resource.TestStep{ + Config: testGoogleStorageBucketsAclBasicDelete(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageBucketAclDelete(bucketName, roleEntityBasic1), + testAccCheckGoogleStorageBucketAclDelete(bucketName, roleEntityBasic2), + testAccCheckGoogleStorageBucketAclDelete(bucketName, roleEntityBasic3_owner), + ), + }, + }, + }) +} + +func TestAccStorageBucketAcl_predefined(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + resource.Test(t, resource.TestCase{ + PreCheck: func() { 
testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsAclPredefined(bucketName), + }, + }, + }) +} + +// Test that we allow the API to reorder our role entities without perma-diffing. +func TestAccStorageBucketAcl_unordered(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + skipIfEnvNotSet(t, "GOOGLE_PROJECT_NUMBER") + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsAclUnordered(bucketName), + }, + }, + }) +} + +func testAccCheckGoogleStorageBucketAclDelete(bucket, roleEntityS string) resource.TestCheckFunc { + return func(s *terraform.State) error { + roleEntity, _ := getRoleEntityPair(roleEntityS) + config := testAccProvider.Meta().(*Config) + + _, err := config.clientStorage.BucketAccessControls.Get(bucket, roleEntity.Entity).Do() + + if err != nil { + return nil + } + + return fmt.Errorf("Error, entity %s still exists", roleEntity.Entity) + } +} + +func testAccCheckGoogleStorageBucketAcl(bucket, roleEntityS string) resource.TestCheckFunc { + return func(s *terraform.State) error { + roleEntity, _ := getRoleEntityPair(roleEntityS) + config := testAccProvider.Meta().(*Config) + + res, err := config.clientStorage.BucketAccessControls.Get(bucket, roleEntity.Entity).Do() + + if err != nil { + return fmt.Errorf("Error retrieving contents of acl for bucket %s: %s", bucket, err) + } + + if res.Role != roleEntity.Role { + return fmt.Errorf("Error, Role mismatch %s != %s", res.Role, roleEntity.Role) + } + + return nil + } +} + +func testAccStorageBucketAclDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_storage_bucket_acl" { + 
continue + } + + bucket := rs.Primary.Attributes["bucket"] + + _, err := config.clientStorage.BucketAccessControls.List(bucket).Do() + + if err == nil { + return fmt.Errorf("Acl for bucket %s still exists", bucket) + } + } + + return nil +} + +func testGoogleStorageBucketsAclBasic1(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_acl" "acl" { + bucket = "${google_storage_bucket.bucket.name}" + role_entity = ["%s", "%s", "%s", "%s", "%s"] +} +`, bucketName, roleEntityOwners, roleEntityEditors, roleEntityViewers, roleEntityBasic1, roleEntityBasic2) +} + +func testGoogleStorageBucketsAclBasic2(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_acl" "acl" { + bucket = "${google_storage_bucket.bucket.name}" + role_entity = ["%s", "%s", "%s", "%s", "%s"] +} +`, bucketName, roleEntityOwners, roleEntityEditors, roleEntityViewers, roleEntityBasic2, roleEntityBasic3_owner) +} + +func testGoogleStorageBucketsAclBasicDelete(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_acl" "acl" { + bucket = "${google_storage_bucket.bucket.name}" + role_entity = [] +} +`, bucketName) +} + +func testGoogleStorageBucketsAclBasic3(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_acl" "acl" { + bucket = "${google_storage_bucket.bucket.name}" + role_entity = ["%s", "%s", "%s", "%s", "%s"] +} +`, bucketName, roleEntityOwners, roleEntityEditors, roleEntityViewers, roleEntityBasic2, roleEntityBasic3_reader) +} + +func testGoogleStorageBucketsAclUnordered(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_acl" "acl" 
{ + bucket = "${google_storage_bucket.bucket.name}" + role_entity = ["%s", "%s", "%s", "%s", "%s"] +} +`, bucketName, roleEntityBasic1, roleEntityViewers, roleEntityOwners, roleEntityBasic2, roleEntityEditors) +} + +func testGoogleStorageBucketsAclPredefined(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_acl" "acl" { + bucket = "${google_storage_bucket.bucket.name}" + predefined_acl = "projectPrivate" + default_acl = "projectPrivate" +} +`, bucketName) +} diff --git a/provider/terraform/tests/resource_storage_bucket_iam_test.go b/provider/terraform/tests/resource_storage_bucket_iam_test.go new file mode 100644 index 000000000000..12fa2b957173 --- /dev/null +++ b/provider/terraform/tests/resource_storage_bucket_iam_test.go @@ -0,0 +1,265 @@ +package google + +import ( + "fmt" + "reflect" + "sort" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccStorageBucketIamBinding(t *testing.T) { + t.Parallel() + + bucket := acctest.RandomWithPrefix("tf-test") + account := acctest.RandomWithPrefix("tf-test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test IAM Binding creation + Config: testAccStorageBucketIamBinding_basic(bucket, account), + Check: testAccCheckGoogleStorageBucketIam(bucket, "roles/storage.objectViewer", []string{ + fmt.Sprintf("serviceAccount:%s-1@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + // Test IAM Binding update + Config: testAccStorageBucketIamBinding_update(bucket, account), + Check: testAccCheckGoogleStorageBucketIam(bucket, "roles/storage.objectViewer", []string{ + fmt.Sprintf("serviceAccount:%s-1@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + 
fmt.Sprintf("serviceAccount:%s-2@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + }, + }) +} + +func TestAccStorageBucketIamPolicy(t *testing.T) { + t.Parallel() + + bucket := acctest.RandomWithPrefix("tf-test") + account := acctest.RandomWithPrefix("tf-test") + serviceAcct := getTestServiceAccountFromEnv(t) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test IAM Policy creation + Config: testAccStorageBucketIamPolicy_basic(bucket, account, serviceAcct), + Check: testAccCheckGoogleStorageBucketIam(bucket, "roles/storage.objectViewer", []string{ + fmt.Sprintf("serviceAccount:%s-1@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + { + // Test IAM Policy update + Config: testAccStorageBucketIamPolicy_update(bucket, account, serviceAcct), + Check: testAccCheckGoogleStorageBucketIam(bucket, "roles/storage.objectViewer", []string{ + fmt.Sprintf("serviceAccount:%s-1@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + fmt.Sprintf("serviceAccount:%s-2@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + }, + }) +} + +func TestAccStorageBucketIamMember(t *testing.T) { + t.Parallel() + + bucket := acctest.RandomWithPrefix("tf-test") + account := acctest.RandomWithPrefix("tf-test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test Iam Member creation (no update for member, no need to test) + Config: testAccStorageBucketIamMember_basic(bucket, account), + Check: testAccCheckGoogleStorageBucketIam(bucket, "roles/storage.admin", []string{ + fmt.Sprintf("serviceAccount:%s-1@%s.iam.gserviceaccount.com", account, getTestProjectFromEnv()), + }), + }, + }, + }) +} + +func testAccCheckGoogleStorageBucketIam(bucket, role string, members []string) resource.TestCheckFunc { + return func(s 
*terraform.State) error { + config := testAccProvider.Meta().(*Config) + p, err := config.clientStorage.Buckets.GetIamPolicy(bucket).Do() + if err != nil { + return err + } + + for _, binding := range p.Bindings { + if binding.Role == role { + sort.Strings(members) + sort.Strings(binding.Members) + + if reflect.DeepEqual(members, binding.Members) { + return nil + } + + return fmt.Errorf("Binding found but expected members is %v, got %v", members, binding.Members) + } + } + + return fmt.Errorf("No binding for role %q", role) + } +} + +func testAccStorageBucketIamPolicy_update(bucket, account, serviceAcct string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_service_account" "test-account-1" { + account_id = "%s-1" + display_name = "Iam Testing Account" +} + +resource "google_service_account" "test-account-2" { + account_id = "%s-2" + display_name = "Iam Testing Account" +} + + +data "google_iam_policy" "foo-policy" { + binding { + role = "roles/storage.objectViewer" + + members = [ + "serviceAccount:${google_service_account.test-account-1.email}", + "serviceAccount:${google_service_account.test-account-2.email}", + ] + } + + binding { + role = "roles/storage.admin" + members = [ + "serviceAccount:%s", + ] + } +} + +resource "google_storage_bucket_iam_policy" "bucket-binding" { + bucket = "${google_storage_bucket.bucket.name}" + policy_data = "${data.google_iam_policy.foo-policy.policy_data}" +} + +`, bucket, account, account, serviceAcct) +} + +func testAccStorageBucketIamPolicy_basic(bucket, account, serviceAcct string) string { + return fmt.Sprintf(` + +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_service_account" "test-account-1" { + account_id = "%s-1" + display_name = "Iam Testing Account" +} + + +data "google_iam_policy" "foo-policy" { + binding { + role = "roles/storage.objectViewer" + members = [ + 
"serviceAccount:${google_service_account.test-account-1.email}", + ] + } + + binding { + role = "roles/storage.admin" + members = [ + "serviceAccount:%s", + ] + } +} + +resource "google_storage_bucket_iam_policy" "bucket-binding" { + bucket = "${google_storage_bucket.bucket.name}" + policy_data = "${data.google_iam_policy.foo-policy.policy_data}" +} + + +`, bucket, account, serviceAcct) +} + +func testAccStorageBucketIamBinding_basic(bucket, account string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_service_account" "test-account-1" { + account_id = "%s-1" + display_name = "Iam Testing Account" +} + +resource "google_storage_bucket_iam_binding" "foo" { + bucket = "${google_storage_bucket.bucket.name}" + role = "roles/storage.objectViewer" + members = [ + "serviceAccount:${google_service_account.test-account-1.email}", + ] +} +`, bucket, account) +} + +func testAccStorageBucketIamBinding_update(bucket, account string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_service_account" "test-account-1" { + account_id = "%s-1" + display_name = "Iam Testing Account" +} + +resource "google_service_account" "test-account-2" { + account_id = "%s-2" + display_name = "Iam Testing Account" +} + +resource "google_storage_bucket_iam_binding" "foo" { + bucket = "${google_storage_bucket.bucket.name}" + role = "roles/storage.objectViewer" + members = [ + "serviceAccount:${google_service_account.test-account-1.email}", + "serviceAccount:${google_service_account.test-account-2.email}", + ] +} +`, bucket, account, account) +} + +func testAccStorageBucketIamMember_basic(bucket, account string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_service_account" "test-account-1" { + account_id = "%s-1" + display_name = "Iam Testing Account" +} + +resource "google_storage_bucket_iam_member" 
"foo" { + bucket = "${google_storage_bucket.bucket.name}" + role = "roles/storage.admin" + member = "serviceAccount:${google_service_account.test-account-1.email}" +} +`, bucket, account) +} diff --git a/provider/terraform/tests/resource_storage_bucket_object_test.go b/provider/terraform/tests/resource_storage_bucket_object_test.go new file mode 100644 index 000000000000..b7df23aa8012 --- /dev/null +++ b/provider/terraform/tests/resource_storage_bucket_object_test.go @@ -0,0 +1,377 @@ +package google + +import ( + "crypto/md5" + "encoding/base64" + "fmt" + "io/ioutil" + "testing" + + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + + "os" + + "google.golang.org/api/storage/v1" +) + +const ( + objectName = "tf-gce-test" + content = "now this is content!" +) + +func TestAccStorageObject_basic(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + data := []byte("data data data") + h := md5.New() + h.Write(data) + data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil)) + + testFile := getNewTmpTestFile(t, "tf-test") + ioutil.WriteFile(testFile.Name(), data, 0644) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageObjectDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsObjectBasic(bucketName, testFile.Name()), + Check: testAccCheckGoogleStorageObject(bucketName, objectName, data_md5), + }, + }, + }) +} + +func TestAccStorageObject_recreate(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + + writeFile := func(name string, data []byte) string { + h := md5.New() + h.Write(data) + data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil)) + + ioutil.WriteFile(name, data, 0644) + return data_md5 + } + testFile := getNewTmpTestFile(t, "tf-test") + data_md5 := writeFile(testFile.Name(), []byte("data data data")) + updatedName := testFile.Name() + ".update" + 
updated_data_md5 := writeFile(updatedName, []byte("datum")) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageObjectDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsObjectBasic(bucketName, testFile.Name()), + Check: testAccCheckGoogleStorageObject(bucketName, objectName, data_md5), + }, + resource.TestStep{ + PreConfig: func() { + err := os.Rename(updatedName, testFile.Name()) + if err != nil { + t.Errorf("Failed to rename %s to %s", updatedName, testFile.Name()) + } + }, + Config: testGoogleStorageBucketsObjectBasic(bucketName, testFile.Name()), + Check: testAccCheckGoogleStorageObject(bucketName, objectName, updated_data_md5), + }, + }, + }) +} + +func TestAccStorageObject_content(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + data := []byte(content) + h := md5.New() + h.Write(data) + data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil)) + + testFile := getNewTmpTestFile(t, "tf-test") + ioutil.WriteFile(testFile.Name(), data, 0644) + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageObjectDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsObjectContent(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageObject(bucketName, objectName, data_md5), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "content_type", "text/plain; charset=utf-8"), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "storage_class", "STANDARD"), + ), + }, + }, + }) +} + +func TestAccStorageObject_withContentCharacteristics(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + data := []byte(content) + h := md5.New() + h.Write(data) + data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil)) + testFile := 
getNewTmpTestFile(t, "tf-test") + ioutil.WriteFile(testFile.Name(), data, 0644) + + disposition, encoding, language, content_type := "inline", "compress", "en", "binary/octet-stream" + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageObjectDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsObject_optionalContentFields( + bucketName, disposition, encoding, language, content_type), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageObject(bucketName, objectName, data_md5), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "content_disposition", disposition), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "content_encoding", encoding), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "content_language", language), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "content_type", content_type), + ), + }, + }, + }) +} + +func TestAccStorageObject_dynamicContent(t *testing.T) { + t.Parallel() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageObjectDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsObjectDynamicContent(testBucketName()), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "content_type", "text/plain; charset=utf-8"), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "storage_class", "STANDARD"), + ), + }, + }, + }) +} + +func TestAccStorageObject_cacheControl(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + data := []byte(content) + h := md5.New() + h.Write(data) + data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil)) + testFile := getNewTmpTestFile(t, "tf-test") 
+ ioutil.WriteFile(testFile.Name(), data, 0644) + + cacheControl := "private" + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageObjectDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsObject_cacheControl(bucketName, testFile.Name(), cacheControl), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageObject(bucketName, objectName, data_md5), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "cache_control", cacheControl), + ), + }, + }, + }) +} + +func TestAccStorageObject_storageClass(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + data := []byte(content) + h := md5.New() + h.Write(data) + data_md5 := base64.StdEncoding.EncodeToString(h.Sum(nil)) + testFile := getNewTmpTestFile(t, "tf-test") + ioutil.WriteFile(testFile.Name(), data, 0644) + + storageClass := "MULTI_REGIONAL" + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageObjectDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsObject_storageClass(bucketName, storageClass), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageObject(bucketName, objectName, data_md5), + resource.TestCheckResourceAttr( + "google_storage_bucket_object.object", "storage_class", storageClass), + ), + }, + }, + }) +} + +func testAccCheckGoogleStorageObject(bucket, object, md5 string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + objectsService := storage.NewObjectsService(config.clientStorage) + + getCall := objectsService.Get(bucket, object) + res, err := getCall.Do() + + if err != nil { + return fmt.Errorf("Error retrieving contents of object %s: %s", object, err) + } + + if md5 != res.Md5Hash { + return fmt.Errorf("Error 
contents of %s garbled, md5 hashes don't match (%s, %s)", object, md5, res.Md5Hash) + } + + return nil + } +} + +func testAccStorageObjectDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_storage_bucket_object" { + continue + } + + bucket := rs.Primary.Attributes["bucket"] + name := rs.Primary.Attributes["name"] + + objectsService := storage.NewObjectsService(config.clientStorage) + + getCall := objectsService.Get(bucket, name) + _, err := getCall.Do() + + if err == nil { + return fmt.Errorf("Object %s still exists", name) + } + } + + return nil +} + +func testGoogleStorageBucketsObjectContent(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = "${google_storage_bucket.bucket.name}" + content = "%s" +} +`, bucketName, objectName, content) +} + +func testGoogleStorageBucketsObjectDynamicContent(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = "${google_storage_bucket.bucket.name}" + content = "${google_storage_bucket.bucket.project}" +} +`, bucketName, objectName) +} + +func testGoogleStorageBucketsObjectBasic(bucketName, sourceFilename string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = "${google_storage_bucket.bucket.name}" + source = "%s" +} +`, bucketName, objectName, sourceFilename) +} + +func testGoogleStorageBucketsObject_optionalContentFields( + bucketName, disposition, encoding, language, content_type string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" 
"object" { + name = "%s" + bucket = "${google_storage_bucket.bucket.name}" + content = "%s" + content_disposition = "%s" + content_encoding = "%s" + content_language = "%s" + content_type = "%s" +} +`, bucketName, objectName, content, disposition, encoding, language, content_type) +} + +func testGoogleStorageBucketsObject_cacheControl(bucketName, sourceFilename, cacheControl string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = "${google_storage_bucket.bucket.name}" + source = "%s" + cache_control = "%s" +} +`, bucketName, objectName, sourceFilename, cacheControl) +} + +func testGoogleStorageBucketsObject_storageClass(bucketName string, storageClass string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = "${google_storage_bucket.bucket.name}" + content = "%s" + storage_class = "%s" +} +`, bucketName, objectName, content, storageClass) +} + +// Creates a new tmp test file. Fails the current test if we cannot create +// new tmp file in the filesystem. 
+func getNewTmpTestFile(t *testing.T, prefix string) *os.File { + testFile, err := ioutil.TempFile("", prefix) + if err != nil { + t.Fatalf("Cannot create temp file: %s", err) + } + return testFile +} diff --git a/provider/terraform/tests/resource_storage_bucket_test.go b/provider/terraform/tests/resource_storage_bucket_test.go new file mode 100644 index 000000000000..5d78ebde436f --- /dev/null +++ b/provider/terraform/tests/resource_storage_bucket_test.go @@ -0,0 +1,979 @@ +package google + +import ( + "bytes" + "fmt" + "log" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + + "google.golang.org/api/googleapi" + "google.golang.org/api/storage/v1" +) + +func TestAccStorageBucket_basic(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt()) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccStorageBucket_basic(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "location", "US"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "false"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "project", getTestProjectFromEnv()), + ), + }, + resource.TestStep{ + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccStorageBucket_lowercaseLocation(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt()) + + resource.Test(t, resource.TestCase{ + PreCheck: 
func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccStorageBucket_lowercaseLocation(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + }, + }) +} + +func TestAccStorageBucket_customAttributes(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt()) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccStorageBucket_customAttributes(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "location", "EU"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "project", getTestProjectFromEnv()), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "true"), + ), + }, + }, + }) +} + +func TestAccStorageBucket_lifecycleRules(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acc-bucket-%d", acctest.RandInt()) + + hash_step0_lc0_action := resourceGCSBucketLifecycleRuleActionHash(map[string]interface{}{"type": "SetStorageClass", "storage_class": "NEARLINE"}) + hash_step0_lc0_condition := resourceGCSBucketLifecycleRuleConditionHash(map[string]interface{}{"age": 2, "created_before": "", "is_live": false, "num_newer_versions": 0}) + + hash_step0_lc1_action := resourceGCSBucketLifecycleRuleActionHash(map[string]interface{}{"type": "Delete", "storage_class": ""}) + hash_step0_lc1_condition := 
resourceGCSBucketLifecycleRuleConditionHash(map[string]interface{}{"age": 10, "created_before": "", "is_live": false, "num_newer_versions": 0}) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccStorageBucket_lifecycleRules(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "project", getTestProjectFromEnv()), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.#", "2"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.0.action.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.0.action.%d.type", hash_step0_lc0_action), "SetStorageClass"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.0.action.%d.storage_class", hash_step0_lc0_action), "NEARLINE"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.0.condition.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.0.condition.%d.age", hash_step0_lc0_condition), "2"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.1.action.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.1.action.%d.type", hash_step0_lc1_action), "Delete"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.1.condition.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.1.condition.%d.age", hash_step0_lc1_condition), "10"), + ), + }, + resource.TestStep{ + ResourceName: 
"google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccStorageBucket_storageClass(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acc-bucket-%d", acctest.RandInt()) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketDestroy, + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_storageClass(bucketName, "MULTI_REGIONAL", ""), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "project", getTestProjectFromEnv()), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "storage_class", "MULTI_REGIONAL"), + ), + }, + { + Config: testAccStorageBucket_storageClass(bucketName, "NEARLINE", ""), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "project", getTestProjectFromEnv()), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "storage_class", "NEARLINE"), + ), + }, + { + Config: testAccStorageBucket_storageClass(bucketName, "REGIONAL", "US-CENTRAL1"), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "project", getTestProjectFromEnv()), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "storage_class", "REGIONAL"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "location", "US-CENTRAL1"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccStorageBucket_update(t 
*testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt()) + + hash_step2_lc0_action := resourceGCSBucketLifecycleRuleActionHash(map[string]interface{}{"type": "Delete", "storage_class": ""}) + hash_step2_lc0_condition := resourceGCSBucketLifecycleRuleConditionHash(map[string]interface{}{"age": 10, "created_before": "", "is_live": false, "num_newer_versions": 0}) + + hash_step3_lc0_action := resourceGCSBucketLifecycleRuleActionHash(map[string]interface{}{"type": "SetStorageClass", "storage_class": "NEARLINE"}) + hash_step3_lc0_condition := resourceGCSBucketLifecycleRuleConditionHash(map[string]interface{}{"age": 2, "created_before": "", "is_live": false, "num_newer_versions": 0}) + + hash_step3_lc1_action := resourceGCSBucketLifecycleRuleActionHash(map[string]interface{}{"type": "Delete", "storage_class": ""}) + hash_step3_lc1_condition := resourceGCSBucketLifecycleRuleConditionHash(map[string]interface{}{"age": 10, "created_before": "", "is_live": false, "num_newer_versions": 2}) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccStorageBucket_basic(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "project", getTestProjectFromEnv()), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "location", "US"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "false"), + ), + }, + resource.TestStep{ + Config: testAccStorageBucket_customAttributes(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + 
resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "project", getTestProjectFromEnv()), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "location", "EU"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "true"), + ), + }, + resource.TestStep{ + Config: testAccStorageBucket_customAttributes_withLifecycle1(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "project", getTestProjectFromEnv()), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "location", "EU"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "true"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.0.action.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.0.action.%d.type", hash_step2_lc0_action), "Delete"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.0.condition.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.0.condition.%d.age", hash_step2_lc0_condition), "10"), + ), + }, + resource.TestStep{ + Config: testAccStorageBucket_customAttributes_withLifecycle2(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "location", "EU"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "true"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.#", "2"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", 
"lifecycle_rule.0.action.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.0.action.%d.type", hash_step3_lc0_action), "SetStorageClass"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.0.action.%d.storage_class", hash_step3_lc0_action), "NEARLINE"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.0.condition.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.0.condition.%d.age", hash_step3_lc0_condition), "2"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.1.action.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.1.action.%d.type", hash_step3_lc1_action), "Delete"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.1.condition.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.1.condition.%d.age", hash_step3_lc1_condition), "10"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", fmt.Sprintf("lifecycle_rule.1.condition.%d.num_newer_versions", hash_step3_lc1_condition), "2"), + ), + }, + resource.TestStep{ + Config: testAccStorageBucket_customAttributes(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "location", "EU"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "true"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "lifecycle_rule.#", "0"), + ), + }, + }, + }) +} + +func TestAccStorageBucket_forceDestroy(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt()) + + 
resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccStorageBucket_customAttributes(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + resource.TestStep{ + Config: testAccStorageBucket_customAttributes(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketPutItem(bucketName), + ), + }, + resource.TestStep{ + Config: testAccStorageBucket_customAttributes(acctest.RandomWithPrefix("tf-test-acl-bucket")), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketMissing(bucketName), + ), + }, + }, + }) +} + +func TestAccStorageBucket_forceDestroyWithVersioning(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acc-bucket-%d", acctest.RandInt()) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccStorageBucket_forceDestroyWithVersioning(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + resource.TestStep{ + Config: testAccStorageBucket_forceDestroyWithVersioning(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketPutItem(bucketName), + ), + }, + resource.TestStep{ + Config: testAccStorageBucket_forceDestroyWithVersioning(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketPutItem(bucketName), + ), + }, + }, + }) +} + +func TestAccStorageBucket_versioning(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt()) + + 
resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccStorageBucket_versioning(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "versioning.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "versioning.0.enabled", "true"), + ), + }, + resource.TestStep{ + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccStorageBucket_logging(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt()) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccStorageBucket_logging(bucketName, "log-bucket"), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.0.log_bucket", "log-bucket"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.0.log_object_prefix", bucketName), + ), + }, + resource.TestStep{ + Config: testAccStorageBucket_loggingWithPrefix(bucketName, "another-log-bucket", "object-prefix"), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.#", "1"), + resource.TestCheckResourceAttr( + 
"google_storage_bucket.bucket", "logging.0.log_bucket", "another-log-bucket"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.0.log_object_prefix", "object-prefix"), + ), + }, + resource.TestStep{ + Config: testAccStorageBucket_basic(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.#", "0"), + ), + }, + }, + }) +} + +func TestAccStorageBucket_cors(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt()) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageBucketsCors(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + }, + }) + + if len(bucket.Cors) != 2 { + t.Errorf("Expected # of cors elements to be 2, got %d", len(bucket.Cors)) + } + + firstArr := bucket.Cors[0] + if firstArr.MaxAgeSeconds != 10 { + t.Errorf("Expected first block's MaxAgeSeconds to be 10, got %d", firstArr.MaxAgeSeconds) + } + + for i, v := range []string{"abc", "def"} { + if firstArr.Origin[i] != v { + t.Errorf("Expected value in first block origin to be to be %v, got %v", v, firstArr.Origin[i]) + } + } + + for i, v := range []string{"a1a"} { + if firstArr.Method[i] != v { + t.Errorf("Expected value in first block method to be to be %v, got %v", v, firstArr.Method[i]) + } + } + + for i, v := range []string{"123", "456", "789"} { + if firstArr.ResponseHeader[i] != v { + t.Errorf("Expected value in first block response headerto be to be %v, got %v", v, firstArr.ResponseHeader[i]) + } + } + + secondArr := bucket.Cors[1] + if 
secondArr.MaxAgeSeconds != 5 { + t.Errorf("Expected second block's MaxAgeSeconds to be 5, got %d", secondArr.MaxAgeSeconds) + } + + for i, v := range []string{"ghi", "jkl"} { + if secondArr.Origin[i] != v { + t.Errorf("Expected value in second block origin to be to be %v, got %v", v, secondArr.Origin[i]) + } + } + + for i, v := range []string{"z9z"} { + if secondArr.Method[i] != v { + t.Errorf("Expected value in second block method to be to be %v, got %v", v, secondArr.Method[i]) + } + } + + for i, v := range []string{"000"} { + if secondArr.ResponseHeader[i] != v { + t.Errorf("Expected value in second block response headerto be to be %v, got %v", v, secondArr.ResponseHeader[i]) + } + } +} + +func TestAccStorageBucket_encryption(t *testing.T) { + t.Parallel() + + projectId := "terraform-" + acctest.RandString(10) + projectOrg := getTestOrgFromEnv(t) + projectBillingAccount := getTestBillingAccountFromEnv(t) + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + cryptoKeyName := fmt.Sprintf("tf-test-%s", acctest.RandString(10)) + bucketName := fmt.Sprintf("tf-test-crypto-bucket-%d", acctest.RandInt()) + var bucket storage.Bucket + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccStorageBucket_encryption(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + }, + }) +} + +func TestAccStorageBucket_labels(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt()) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageBucketDestroy, + Steps: []resource.TestStep{ + // Going from two labels + 
resource.TestStep{ + Config: testAccStorageBucket_updateLabels(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketHasLabel(&bucket, "my-label", "my-updated-label-value"), + testAccCheckStorageBucketHasLabel(&bucket, "a-new-label", "a-new-label-value"), + ), + }, + resource.TestStep{ + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + }, + // Down to only one label (test single label deletion) + resource.TestStep{ + Config: testAccStorageBucket_labels(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketHasLabel(&bucket, "my-label", "my-label-value"), + ), + }, + resource.TestStep{ + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + }, + // And make sure deleting all labels work + resource.TestStep{ + Config: testAccStorageBucket_basic(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketHasNoLabels(&bucket), + ), + }, + resource.TestStep{ + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckStorageBucketExists(n string, bucketName string, bucket *storage.Bucket) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No Project_ID is set") + } + + config := testAccProvider.Meta().(*Config) + + found, err := config.clientStorage.Buckets.Get(rs.Primary.ID).Do() + if err != nil { + return err + } + + if found.Id != rs.Primary.ID { + return fmt.Errorf("Bucket not found") + } + + if found.Name != 
bucketName { + return fmt.Errorf("expected name %s, got %s", bucketName, found.Name) + } + + *bucket = *found + return nil + } +} + +func testAccCheckStorageBucketHasLabel(bucket *storage.Bucket, key, value string) resource.TestCheckFunc { + return func(s *terraform.State) error { + val, ok := bucket.Labels[key] + if !ok { + return fmt.Errorf("Label with key %s not found", key) + } + + if val != value { + return fmt.Errorf("Label value did not match for key %s: expected %s but found %s", key, value, val) + } + return nil + } +} + +func testAccCheckStorageBucketHasNoLabels(bucket *storage.Bucket) resource.TestCheckFunc { + return func(s *terraform.State) error { + if len(bucket.Labels) > 0 { + return fmt.Errorf("Expected 0 labels, found %v", bucket.Labels) + } + return nil + } +} + +func testAccCheckStorageBucketPutItem(bucketName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + data := bytes.NewBufferString("test") + dataReader := bytes.NewReader(data.Bytes()) + object := &storage.Object{Name: "bucketDestroyTestFile"} + + // This needs to use Media(io.Reader) call, otherwise it does not go to /upload API and fails + if res, err := config.clientStorage.Objects.Insert(bucketName, object).Media(dataReader).Do(); err == nil { + log.Printf("[INFO] Created object %v at location %v\n\n", res.Name, res.SelfLink) + } else { + return fmt.Errorf("Objects.Insert failed: %v", err) + } + + return nil + } +} + +func testAccCheckStorageBucketMissing(bucketName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + _, err := config.clientStorage.Buckets.Get(bucketName).Do() + if err == nil { + return fmt.Errorf("Found %s", bucketName) + } + + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { + return nil + } + + return err + } +} + +func testAccStorageBucketDestroy(s *terraform.State) error { + config := 
testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_storage_bucket" { + continue + } + + _, err := config.clientStorage.Buckets.Get(rs.Primary.ID).Do() + if err == nil { + return fmt.Errorf("Bucket still exists") + } + } + + return nil +} + +func testAccStorageBucket_basic(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} +`, bucketName) +} + +func testAccStorageBucket_lowercaseLocation(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "eu" +} +`, bucketName) +} + +func testAccStorageBucket_customAttributes(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "EU" + force_destroy = "true" +} +`, bucketName) +} + +func testAccStorageBucket_customAttributes_withLifecycle1(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "EU" + force_destroy = "true" + lifecycle_rule { + action { + type = "Delete" + } + condition { + age = 10 + } + } +} +`, bucketName) +} + +func testAccStorageBucket_customAttributes_withLifecycle2(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "EU" + force_destroy = "true" + lifecycle_rule { + action { + type = "SetStorageClass" + storage_class = "NEARLINE" + } + condition { + age = 2 + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + age = 10 + num_newer_versions = 2 + } + } +} +`, bucketName) +} + +func testAccStorageBucket_storageClass(bucketName, storageClass, location string) string { + var locationBlock string + if location != "" { + locationBlock = fmt.Sprintf(` + location = "%s"`, location) + } + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + storage_class = "%s"%s +} +`, 
bucketName, storageClass, locationBlock) +} + +func testGoogleStorageBucketsCors(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + cors { + origin = ["abc", "def"] + method = ["a1a"] + response_header = ["123", "456", "789"] + max_age_seconds = 10 + } + + cors { + origin = ["ghi", "jkl"] + method = ["z9z"] + response_header = ["000"] + max_age_seconds = 5 + } +} +`, bucketName) +} + +func testAccStorageBucket_forceDestroyWithVersioning(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + force_destroy = "true" + versioning = { + enabled = "true" + } +} +`, bucketName) +} + +func testAccStorageBucket_versioning(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + versioning = { + enabled = "true" + } +} +`, bucketName) +} + +func testAccStorageBucket_logging(bucketName string, logBucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + logging = { + log_bucket = "%s" + } +} +`, bucketName, logBucketName) +} + +func testAccStorageBucket_loggingWithPrefix(bucketName string, logBucketName string, prefix string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + logging = { + log_bucket = "%s" + log_object_prefix = "%s" + } +} +`, bucketName, logBucketName, prefix) +} + +func testAccStorageBucket_lifecycleRules(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + lifecycle_rule { + action { + type = "SetStorageClass" + storage_class = "NEARLINE" + } + condition { + age = 2 + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + age = 10 + } + } +} +`, bucketName) +} + +func testAccStorageBucket_labels(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + labels { 
+ my-label = "my-label-value" + } +} +`, bucketName) +} + +func testAccStorageBucket_encryption(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, bucketName string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + name = "%s" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_services" "acceptance" { + project = "${google_project.acceptance.project_id}" + + services = [ + "cloudkms.googleapis.com", + ] +} + +resource "google_kms_key_ring" "key_ring" { + project = "${google_project_services.acceptance.project}" + name = "%s" + location = "us" +} + +resource "google_kms_crypto_key" "crypto_key" { + name = "%s" + key_ring = "${google_kms_key_ring.key_ring.id}" + rotation_period = "1000000s" +} + +resource "google_storage_bucket" "bucket" { + name = "%s" + encryption { + default_kms_key_name = "${google_kms_crypto_key.crypto_key.self_link}" + } +} + `, projectId, projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName, bucketName) +} + +func testAccStorageBucket_updateLabels(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + labels { + my-label = "my-updated-label-value" + a-new-label = "a-new-label-value" + } +} +`, bucketName) +} diff --git a/provider/terraform/tests/resource_storage_default_object_acl_test.go b/provider/terraform/tests/resource_storage_default_object_acl_test.go new file mode 100644 index 000000000000..bf4de1b9ff40 --- /dev/null +++ b/provider/terraform/tests/resource_storage_default_object_acl_test.go @@ -0,0 +1,218 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccStorageDefaultObjectAcl_basic(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: 
testAccProviders, + CheckDestroy: testAccStorageDefaultObjectAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageDefaultObjectsAclBasic(bucketName, roleEntityBasic1, roleEntityBasic2), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageDefaultObjectAcl(bucketName, roleEntityBasic1), + testAccCheckGoogleStorageDefaultObjectAcl(bucketName, roleEntityBasic2), + ), + }, + }, + }) +} + +func TestAccStorageDefaultObjectAcl_upgrade(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageDefaultObjectAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageDefaultObjectsAclBasic(bucketName, roleEntityBasic1, roleEntityBasic2), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageDefaultObjectAcl(bucketName, roleEntityBasic1), + testAccCheckGoogleStorageDefaultObjectAcl(bucketName, roleEntityBasic2), + ), + }, + + resource.TestStep{ + Config: testGoogleStorageDefaultObjectsAclBasic(bucketName, roleEntityBasic2, roleEntityBasic3_owner), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageDefaultObjectAcl(bucketName, roleEntityBasic2), + testAccCheckGoogleStorageDefaultObjectAcl(bucketName, roleEntityBasic3_owner), + ), + }, + + resource.TestStep{ + Config: testGoogleStorageDefaultObjectsAclBasicDelete(bucketName, roleEntityBasic1), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageDefaultObjectAcl(bucketName, roleEntityBasic1), + testAccCheckGoogleStorageDefaultObjectAclDelete(bucketName, roleEntityBasic2), + testAccCheckGoogleStorageDefaultObjectAclDelete(bucketName, roleEntityBasic3_reader), + ), + }, + }, + }) +} + +func TestAccStorageDefaultObjectAcl_downgrade(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { 
testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageDefaultObjectAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageDefaultObjectsAclBasic(bucketName, roleEntityBasic2, roleEntityBasic3_owner), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageDefaultObjectAcl(bucketName, roleEntityBasic2), + testAccCheckGoogleStorageDefaultObjectAcl(bucketName, roleEntityBasic3_owner), + ), + }, + + resource.TestStep{ + Config: testGoogleStorageDefaultObjectsAclBasic(bucketName, roleEntityBasic2, roleEntityBasic3_reader), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageDefaultObjectAcl(bucketName, roleEntityBasic2), + testAccCheckGoogleStorageDefaultObjectAcl(bucketName, roleEntityBasic3_reader), + ), + }, + + resource.TestStep{ + Config: testGoogleStorageDefaultObjectsAclBasicDelete(bucketName, roleEntityBasic1), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageDefaultObjectAcl(bucketName, roleEntityBasic1), + testAccCheckGoogleStorageDefaultObjectAclDelete(bucketName, roleEntityBasic2), + testAccCheckGoogleStorageDefaultObjectAclDelete(bucketName, roleEntityBasic3_reader), + ), + }, + }, + }) +} + +// Test that we allow the API to reorder our role entities without perma-diffing. 
+func TestAccStorageDefaultObjectAcl_unordered(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageDefaultObjectAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageDefaultObjectAclUnordered(bucketName), + }, + }, + }) +} + +func testAccCheckGoogleStorageDefaultObjectAcl(bucket, roleEntityS string) resource.TestCheckFunc { + return func(s *terraform.State) error { + roleEntity, _ := getRoleEntityPair(roleEntityS) + config := testAccProvider.Meta().(*Config) + + res, err := config.clientStorage.DefaultObjectAccessControls.Get(bucket, + roleEntity.Entity).Do() + + if err != nil { + return fmt.Errorf("Error retrieving contents of storage default Acl for bucket %s: %s", bucket, err) + } + + if res.Role != roleEntity.Role { + return fmt.Errorf("Error, Role mismatch %s != %s", res.Role, roleEntity.Role) + } + + return nil + } +} + +func testAccStorageDefaultObjectAclDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + + if rs.Type != "google_storage_default_object_acl" { + continue + } + + bucket := rs.Primary.Attributes["bucket"] + + _, err := config.clientStorage.DefaultObjectAccessControls.List(bucket).Do() + if err == nil { + return fmt.Errorf("Default Storage Object Acl for bucket %s still exists", bucket) + } + } + return nil +} + +func testAccCheckGoogleStorageDefaultObjectAclDelete(bucket, roleEntityS string) resource.TestCheckFunc { + return func(s *terraform.State) error { + roleEntity, _ := getRoleEntityPair(roleEntityS) + config := testAccProvider.Meta().(*Config) + + _, err := config.clientStorage.DefaultObjectAccessControls.Get(bucket, roleEntity.Entity).Do() + + if err != nil { + return nil + } + + return fmt.Errorf("Error, Object Default Acl Entity still exists %s for bucket %s", + 
roleEntity.Entity, bucket) + } +} + +func testGoogleStorageDefaultObjectsAclBasicDelete(bucketName, roleEntity string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_default_object_acl" "acl" { + bucket = "${google_storage_bucket.bucket.name}" + role_entity = ["%s"] +} +`, bucketName, roleEntity) +} + +func testGoogleStorageDefaultObjectsAclBasic(bucketName, roleEntity1, roleEntity2 string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_default_object_acl" "acl" { + bucket = "${google_storage_bucket.bucket.name}" + role_entity = ["%s", "%s"] +} +`, bucketName, roleEntity1, roleEntity2) +} + +func testGoogleStorageDefaultObjectAclUnordered(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_default_object_acl" "acl" { + bucket = "${google_storage_bucket.bucket.name}" + role_entity = ["%s", "%s", "%s", "%s", "%s"] +} +`, bucketName, roleEntityBasic1, roleEntityViewers, roleEntityOwners, roleEntityBasic2, roleEntityEditors) +} diff --git a/provider/terraform/tests/resource_storage_notification_test.go b/provider/terraform/tests/resource_storage_notification_test.go new file mode 100644 index 000000000000..6a6b8f114e8f --- /dev/null +++ b/provider/terraform/tests/resource_storage_notification_test.go @@ -0,0 +1,252 @@ +package google + +import ( + "fmt" + "os" + "reflect" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "google.golang.org/api/storage/v1" +) + +var ( + payload = "JSON_API_V1" +) + +func TestAccStorageNotification_basic(t *testing.T) { + t.Parallel() + + skipIfEnvNotSet(t, "GOOGLE_PROJECT") + + var notification storage.Notification + bucketName := testBucketName() + topicName := 
fmt.Sprintf("tf-pstopic-test-%d", acctest.RandInt()) + topic := fmt.Sprintf("//pubsub.googleapis.com/projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageNotificationDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageNotificationBasic(bucketName, topicName, topic), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageNotificationExists( + "google_storage_notification.notification", ¬ification), + resource.TestCheckResourceAttr( + "google_storage_notification.notification", "bucket", bucketName), + resource.TestCheckResourceAttr( + "google_storage_notification.notification", "topic", topic), + resource.TestCheckResourceAttr( + "google_storage_notification.notification", "payload_format", payload), + resource.TestCheckResourceAttr( + "google_storage_notification.notification_with_prefix", "object_name_prefix", "foobar"), + ), + }, + resource.TestStep{ + ResourceName: "google_storage_notification.notification", + ImportState: true, + ImportStateVerify: true, + }, + resource.TestStep{ + ResourceName: "google_storage_notification.notification_with_prefix", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccStorageNotification_withEventsAndAttributes(t *testing.T) { + t.Parallel() + + skipIfEnvNotSet(t, "GOOGLE_PROJECT") + + var notification storage.Notification + bucketName := testBucketName() + topicName := fmt.Sprintf("tf-pstopic-test-%d", acctest.RandInt()) + topic := fmt.Sprintf("//pubsub.googleapis.com/projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) + eventType1 := "OBJECT_FINALIZE" + eventType2 := "OBJECT_ARCHIVE" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageNotificationDestroy, + Steps: []resource.TestStep{ + 
resource.TestStep{ + Config: testGoogleStorageNotificationOptionalEventsAttributes(bucketName, topicName, topic, eventType1, eventType2), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageNotificationExists( + "google_storage_notification.notification", ¬ification), + resource.TestCheckResourceAttr( + "google_storage_notification.notification", "bucket", bucketName), + resource.TestCheckResourceAttr( + "google_storage_notification.notification", "topic", topic), + resource.TestCheckResourceAttr( + "google_storage_notification.notification", "payload_format", payload), + testAccCheckStorageNotificationCheckEventType( + ¬ification, []string{eventType1, eventType2}), + testAccCheckStorageNotificationCheckAttributes( + ¬ification, "new-attribute", "new-attribute-value"), + ), + }, + resource.TestStep{ + ResourceName: "google_storage_notification.notification", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccStorageNotificationDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_storage_notification" { + continue + } + + bucket, notificationID := resourceStorageNotificationParseID(rs.Primary.ID) + + _, err := config.clientStorage.Notifications.Get(bucket, notificationID).Do() + if err == nil { + return fmt.Errorf("Notification configuration still exists") + } + } + + return nil +} + +func testAccCheckStorageNotificationExists(resource string, notification *storage.Notification) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resource] + if !ok { + return fmt.Errorf("Not found: %s", resource) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := testAccProvider.Meta().(*Config) + + bucket, notificationID := resourceStorageNotificationParseID(rs.Primary.ID) + + found, err := config.clientStorage.Notifications.Get(bucket, notificationID).Do() 
+ if err != nil { + return err + } + + if found.Id != notificationID { + return fmt.Errorf("Storage notification configuration not found") + } + + *notification = *found + + return nil + } +} + +func testAccCheckStorageNotificationCheckEventType(notification *storage.Notification, eventTypes []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + if !reflect.DeepEqual(notification.EventTypes, eventTypes) { + return fmt.Errorf("Target event types are incorrect. Expected %s, got %s", eventTypes, notification.EventTypes) + } + return nil + } +} + +func testAccCheckStorageNotificationCheckAttributes(notification *storage.Notification, key, value string) resource.TestCheckFunc { + return func(s *terraform.State) error { + val, ok := notification.CustomAttributes[key] + if !ok { + return fmt.Errorf("Custom attribute with key %s not found", key) + } + + if val != value { + return fmt.Errorf("Custom attribute value did not match for key %s: expected %s but found %s", key, value, val) + } + return nil + } +} + +func testGoogleStorageNotificationBasic(bucketName, topicName, topic string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_pubsub_topic" "topic" { + name = "%s" +} + +// We have to provide GCS default storage account with the permission +// to publish to a Cloud Pub/Sub topic from this project +// Otherwise notification configuration won't work +data "google_storage_project_service_account" "gcs_account" {} + +resource "google_pubsub_topic_iam_binding" "binding" { + topic = "${google_pubsub_topic.topic.name}" + role = "roles/pubsub.publisher" + + members = ["serviceAccount:${data.google_storage_project_service_account.gcs_account.email_address}"] +} + +resource "google_storage_notification" "notification" { + bucket = "${google_storage_bucket.bucket.name}" + payload_format = "JSON_API_V1" + topic = "${google_pubsub_topic.topic.id}" + depends_on = 
["google_pubsub_topic_iam_binding.binding"] +} + +resource "google_storage_notification" "notification_with_prefix" { + bucket = "${google_storage_bucket.bucket.name}" + payload_format = "JSON_API_V1" + topic = "${google_pubsub_topic.topic.id}" + object_name_prefix = "foobar" + depends_on = ["google_pubsub_topic_iam_binding.binding"] +} + +`, bucketName, topicName) +} + +func testGoogleStorageNotificationOptionalEventsAttributes(bucketName, topicName, topic, eventType1, eventType2 string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_pubsub_topic" "topic" { + name = "%s" +} + +// We have to provide GCS default storage account with the permission +// to publish to a Cloud Pub/Sub topic from this project +// Otherwise notification configuration won't work +data "google_storage_project_service_account" "gcs_account" {} + +resource "google_pubsub_topic_iam_binding" "binding" { + topic = "${google_pubsub_topic.topic.name}" + role = "roles/pubsub.publisher" + + members = ["serviceAccount:${data.google_storage_project_service_account.gcs_account.email_address}"] +} + +resource "google_storage_notification" "notification" { + bucket = "${google_storage_bucket.bucket.name}" + payload_format = "JSON_API_V1" + topic = "${google_pubsub_topic.topic.id}" + event_types = ["%s","%s"] + custom_attributes { + new-attribute = "new-attribute-value" + } + depends_on = ["google_pubsub_topic_iam_binding.binding"] +} + +`, bucketName, topicName, eventType1, eventType2) +} diff --git a/provider/terraform/tests/resource_storage_object_acl_test.go b/provider/terraform/tests/resource_storage_object_acl_test.go new file mode 100644 index 000000000000..3b10b6b0c349 --- /dev/null +++ b/provider/terraform/tests/resource_storage_object_acl_test.go @@ -0,0 +1,382 @@ +package google + +import ( + "fmt" + "io/ioutil" + "math/rand" + "testing" + "time" + + "github.com/hashicorp/terraform/helper/resource" + 
"github.com/hashicorp/terraform/terraform" +) + +var tfObjectAcl, errObjectAcl = ioutil.TempFile("", "tf-gce-test") + +func testAclObjectName() string { + return fmt.Sprintf("%s-%d", "tf-test-acl-object", + rand.New(rand.NewSource(time.Now().UnixNano())).Int()) +} + +func TestAccStorageObjectAcl_basic(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + objectName := testAclObjectName() + objectData := []byte("data data data") + ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644) + resource.Test(t, resource.TestCase{ + PreCheck: func() { + if errObjectAcl != nil { + panic(errObjectAcl) + } + testAccPreCheck(t) + }, + Providers: testAccProviders, + CheckDestroy: testAccStorageObjectAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageObjectsAclBasic1(bucketName, objectName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageObjectAcl(bucketName, + objectName, roleEntityBasic1), + testAccCheckGoogleStorageObjectAcl(bucketName, + objectName, roleEntityBasic2), + ), + }, + }, + }) +} + +func TestAccStorageObjectAcl_upgrade(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + objectName := testAclObjectName() + objectData := []byte("data data data") + ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644) + resource.Test(t, resource.TestCase{ + PreCheck: func() { + if errObjectAcl != nil { + panic(errObjectAcl) + } + testAccPreCheck(t) + }, + Providers: testAccProviders, + CheckDestroy: testAccStorageObjectAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageObjectsAclBasic1(bucketName, objectName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageObjectAcl(bucketName, + objectName, roleEntityBasic1), + testAccCheckGoogleStorageObjectAcl(bucketName, + objectName, roleEntityBasic2), + ), + }, + + resource.TestStep{ + Config: testGoogleStorageObjectsAclBasic2(bucketName, objectName), + Check: resource.ComposeTestCheckFunc( + 
testAccCheckGoogleStorageObjectAcl(bucketName, + objectName, roleEntityBasic2), + testAccCheckGoogleStorageObjectAcl(bucketName, + objectName, roleEntityBasic3_owner), + ), + }, + + resource.TestStep{ + Config: testGoogleStorageObjectsAclBasicDelete(bucketName, objectName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageObjectAclDelete(bucketName, + objectName, roleEntityBasic1), + testAccCheckGoogleStorageObjectAclDelete(bucketName, + objectName, roleEntityBasic2), + testAccCheckGoogleStorageObjectAclDelete(bucketName, + objectName, roleEntityBasic3_reader), + ), + }, + }, + }) +} + +func TestAccStorageObjectAcl_downgrade(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + objectName := testAclObjectName() + objectData := []byte("data data data") + ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644) + resource.Test(t, resource.TestCase{ + PreCheck: func() { + if errObjectAcl != nil { + panic(errObjectAcl) + } + testAccPreCheck(t) + }, + Providers: testAccProviders, + CheckDestroy: testAccStorageObjectAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageObjectsAclBasic2(bucketName, objectName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageObjectAcl(bucketName, + objectName, roleEntityBasic2), + testAccCheckGoogleStorageObjectAcl(bucketName, + objectName, roleEntityBasic3_owner), + ), + }, + + resource.TestStep{ + Config: testGoogleStorageObjectsAclBasic3(bucketName, objectName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageObjectAcl(bucketName, + objectName, roleEntityBasic2), + testAccCheckGoogleStorageObjectAcl(bucketName, + objectName, roleEntityBasic3_reader), + ), + }, + + resource.TestStep{ + Config: testGoogleStorageObjectsAclBasicDelete(bucketName, objectName), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleStorageObjectAclDelete(bucketName, + objectName, roleEntityBasic1), + 
testAccCheckGoogleStorageObjectAclDelete(bucketName, + objectName, roleEntityBasic2), + testAccCheckGoogleStorageObjectAclDelete(bucketName, + objectName, roleEntityBasic3_reader), + ), + }, + }, + }) +} + +func TestAccStorageObjectAcl_predefined(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + objectName := testAclObjectName() + objectData := []byte("data data data") + ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644) + resource.Test(t, resource.TestCase{ + PreCheck: func() { + if errObjectAcl != nil { + panic(errObjectAcl) + } + testAccPreCheck(t) + }, + Providers: testAccProviders, + CheckDestroy: testAccStorageObjectAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageObjectsAclPredefined(bucketName, objectName), + }, + }, + }) +} + +// Test that we allow the API to reorder our role entities without perma-diffing. +func TestAccStorageObjectAcl_unordered(t *testing.T) { + t.Parallel() + + bucketName := testBucketName() + objectName := testAclObjectName() + objectData := []byte("data data data") + ioutil.WriteFile(tfObjectAcl.Name(), objectData, 0644) + resource.Test(t, resource.TestCase{ + PreCheck: func() { + if errObjectAcl != nil { + panic(errObjectAcl) + } + testAccPreCheck(t) + }, + Providers: testAccProviders, + CheckDestroy: testAccStorageObjectAclDestroy, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testGoogleStorageObjectAclUnordered(bucketName, objectName), + }, + }, + }) +} + +func testAccCheckGoogleStorageObjectAcl(bucket, object, roleEntityS string) resource.TestCheckFunc { + return func(s *terraform.State) error { + roleEntity, _ := getRoleEntityPair(roleEntityS) + config := testAccProvider.Meta().(*Config) + + res, err := config.clientStorage.ObjectAccessControls.Get(bucket, + object, roleEntity.Entity).Do() + + if err != nil { + return fmt.Errorf("Error retrieving contents of acl for bucket %s: %s", bucket, err) + } + + if res.Role != roleEntity.Role { + return 
fmt.Errorf("Error, Role mismatch %s != %s", res.Role, roleEntity.Role) + } + + return nil + } +} + +func testAccCheckGoogleStorageObjectAclDelete(bucket, object, roleEntityS string) resource.TestCheckFunc { + return func(s *terraform.State) error { + roleEntity, _ := getRoleEntityPair(roleEntityS) + config := testAccProvider.Meta().(*Config) + + _, err := config.clientStorage.ObjectAccessControls.Get(bucket, + object, roleEntity.Entity).Do() + + if err != nil { + return nil + } + + return fmt.Errorf("Error, Entity still exists %s", roleEntity.Entity) + } +} + +func testAccStorageObjectAclDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_storage_bucket_acl" { + continue + } + + bucket := rs.Primary.Attributes["bucket"] + object := rs.Primary.Attributes["object"] + + _, err := config.clientStorage.ObjectAccessControls.List(bucket, object).Do() + + if err == nil { + return fmt.Errorf("Acl for bucket %s still exists", bucket) + } + } + + return nil +} + +func testGoogleStorageObjectsAclBasicDelete(bucketName string, objectName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = "${google_storage_bucket.bucket.name}" + source = "%s" +} + +resource "google_storage_object_acl" "acl" { + object = "${google_storage_bucket_object.object.name}" + bucket = "${google_storage_bucket.bucket.name}" + role_entity = [] +} +`, bucketName, objectName, tfObjectAcl.Name()) +} + +func testGoogleStorageObjectsAclBasic1(bucketName string, objectName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = "${google_storage_bucket.bucket.name}" + source = "%s" +} + +resource "google_storage_object_acl" "acl" { + object = 
"${google_storage_bucket_object.object.name}" + bucket = "${google_storage_bucket.bucket.name}" + role_entity = ["%s", "%s"] +} +`, bucketName, objectName, tfObjectAcl.Name(), + roleEntityBasic1, roleEntityBasic2) +} + +func testGoogleStorageObjectsAclBasic2(bucketName string, objectName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = "${google_storage_bucket.bucket.name}" + source = "%s" +} + +resource "google_storage_object_acl" "acl" { + object = "${google_storage_bucket_object.object.name}" + bucket = "${google_storage_bucket.bucket.name}" + role_entity = ["%s", "%s"] +} +`, bucketName, objectName, tfObjectAcl.Name(), + roleEntityBasic2, roleEntityBasic3_owner) +} + +func testGoogleStorageObjectsAclBasic3(bucketName string, objectName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = "${google_storage_bucket.bucket.name}" + source = "%s" +} + +resource "google_storage_object_acl" "acl" { + object = "${google_storage_bucket_object.object.name}" + bucket = "${google_storage_bucket.bucket.name}" + role_entity = ["%s", "%s"] +} +`, bucketName, objectName, tfObjectAcl.Name(), + roleEntityBasic2, roleEntityBasic3_reader) +} + +func testGoogleStorageObjectsAclPredefined(bucketName string, objectName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = "${google_storage_bucket.bucket.name}" + source = "%s" +} + +resource "google_storage_object_acl" "acl" { + object = "${google_storage_bucket_object.object.name}" + bucket = "${google_storage_bucket.bucket.name}" + predefined_acl = "projectPrivate" +} +`, bucketName, objectName, tfObjectAcl.Name()) +} + +func 
testGoogleStorageObjectAclUnordered(bucketName, objectName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" +} + +resource "google_storage_bucket_object" "object" { + name = "%s" + bucket = "${google_storage_bucket.bucket.name}" + source = "%s" +} + +resource "google_storage_object_acl" "acl" { + object = "${google_storage_bucket_object.object.name}" + bucket = "${google_storage_bucket.bucket.name}" + role_entity = ["%s", "%s", "%s", "%s", "%s"] +} +`, bucketName, objectName, tfObjectAcl.Name(), roleEntityBasic1, roleEntityViewers, roleEntityOwners, roleEntityBasic2, roleEntityEditors) +} diff --git a/provider/terraform/tests/resource_usage_export_bucket_test.go b/provider/terraform/tests/resource_usage_export_bucket_test.go new file mode 100644 index 000000000000..4a5b86c155db --- /dev/null +++ b/provider/terraform/tests/resource_usage_export_bucket_test.go @@ -0,0 +1,59 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" +) + +func TestAccComputeResourceUsageExportBucket(t *testing.T) { + org := getTestOrgFromEnv(t) + billingId := getTestBillingAccountFromEnv(t) + + baseProject := "ub-" + acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + resource.TestStep{ + Config: testAccResourceUsageExportBucket(baseProject, org, billingId), + }, + // Test import. 
+ resource.TestStep{ + ResourceName: "google_project_usage_export_bucket.ueb", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccResourceUsageExportBucket(baseProject, org, billingId string) string { + return fmt.Sprintf(` +resource "google_project" "base" { + project_id = "%s" + name = "Export Bucket Base" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "service" { + project = "${google_project.base.project_id}" + service = "compute.googleapis.com" +} + +resource "google_storage_bucket" "bucket" { + name = "b-${google_project.base.project_id}" + project = "${google_project_service.service.project}" +} + +resource "google_project_usage_export_bucket" "ueb" { + project = "${google_project.base.project_id}" + bucket_name = "${google_storage_bucket.bucket.name}" + prefix = "foobar" +} +`, baseProject, org, billingId) +} diff --git a/provider/terraform/utils/iam_compute_subnetwork.go b/provider/terraform/utils/iam_compute_subnetwork.go.erb similarity index 92% rename from provider/terraform/utils/iam_compute_subnetwork.go rename to provider/terraform/utils/iam_compute_subnetwork.go.erb index 7f9eaa4dde78..7c19b6676080 100644 --- a/provider/terraform/utils/iam_compute_subnetwork.go +++ b/provider/terraform/utils/iam_compute_subnetwork.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -12,14 +13,18 @@ import ( var IamComputeSubnetworkSchema = map[string]*schema.Schema{ "subnetwork": { - Deprecated: "This resource is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% if version.nil? || version == 'ga' -%> + Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. 
See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeString, Required: true, ForceNew: true, }, "project": { - Deprecated: "This resource is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% if version.nil? || version == 'ga' -%> + Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeString, Optional: true, Computed: true, @@ -27,7 +32,9 @@ var IamComputeSubnetworkSchema = map[string]*schema.Schema{ }, "region": { - Deprecated: "This resource is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% if version.nil? || version == 'ga' -%> + Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeString, Optional: true, Computed: true, diff --git a/provider/terraform/utils/node_config.go b/provider/terraform/utils/node_config.go.erb similarity index 98% rename from provider/terraform/utils/node_config.go rename to provider/terraform/utils/node_config.go.erb index 948f60db1171..297ab05c852e 100644 --- a/provider/terraform/utils/node_config.go +++ b/provider/terraform/utils/node_config.go.erb @@ -1,3 +1,4 @@ +<% autogen_exception -%> package google import ( @@ -142,7 +143,9 @@ var schemaNodeConfig = &schema.Schema{ }, "taint": { +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. 
Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeList, Optional: true, ForceNew: true, @@ -170,7 +173,9 @@ var schemaNodeConfig = &schema.Schema{ }, "workload_metadata_config": { +<% if version.nil? || version == 'ga' -%> Deprecated: "This field is in beta and will be removed from this provider. Use it in the the google-beta provider instead. See https://terraform.io/docs/providers/google/provider_versions.html for more details.", +<% end -%> Type: schema.TypeList, Optional: true, ForceNew: true, diff --git a/provider/terraform/utils/provider.go b/provider/terraform/utils/provider.go index 69c148aca755..85d752d06c1e 100644 --- a/provider/terraform/utils/provider.go +++ b/provider/terraform/utils/provider.go @@ -106,6 +106,7 @@ func Provider() terraform.ResourceProvider { GeneratedFilestoreResourcesMap, GeneratedRedisResourcesMap, GeneratedResourceManagerResourcesMap, + GeneratedStorageResourcesMap, GeneratedMonitoringResourcesMap, map[string]*schema.Resource{ "google_app_engine_application": resourceAppEngineApplication(), diff --git a/provider/terraform/utils/transport.go b/provider/terraform/utils/transport.go index 7b860c7b4ac3..c29b8dad97ea 100644 --- a/provider/terraform/utils/transport.go +++ b/provider/terraform/utils/transport.go @@ -62,6 +62,12 @@ func sendRequest(config *Config, method, rawurl string, body map[string]interfac return nil, err } + // 204 responses will have no body, so we're going to error with "EOF" if we + // try to parse it. Instead, we can just return nil. 
+ if res.StatusCode == 204 { + return nil, nil + } + result := make(map[string]interface{}) if err := json.NewDecoder(res.Body).Decode(&result); err != nil { return nil, err diff --git a/provider/test_data/property.rb b/provider/test_data/property.rb index ad76f36768ff..c51fde76d71f 100644 --- a/provider/test_data/property.rb +++ b/provider/test_data/property.rb @@ -31,7 +31,7 @@ def initialize(provider) # rubocop:disable Metrics/PerceivedComplexity def property(prop, index, comparator, value, start_indent = 0, name_override = nil) - Google::LOGGER.info \ + Google::LOGGER.debug \ "Generating test #{prop.out_name}[#{index}] #{comparator} #{value}" if prop.class <= Api::Type::ResourceRef diff --git a/provider/test_matrix.rb b/provider/test_matrix.rb index 3d37f2cc8ba9..0cd69faad908 100644 --- a/provider/test_matrix.rb +++ b/provider/test_matrix.rb @@ -26,7 +26,7 @@ def initialize end def add(matrix, file, object) - Google::LOGGER.info \ + Google::LOGGER.debug \ "Registering test matrix for #{object.name} @ #{file}" @matrixes << matrix end @@ -104,7 +104,7 @@ def pop(ensurable, exists = :none, changes = :none, has_name = :none, # Ensures that all test contexts are defined def verify - Google::LOGGER.info "Verifying test matrix for #{@object.name} @ #{@file}" + Google::LOGGER.debug "Verifying test matrix for #{@object.name} @ #{@file}" verify_topics verify_match_expectations fail_if_not_all_popped unless @hierarchy.empty? 
diff --git a/templates/ansible/facts.erb b/templates/ansible/facts.erb index d6be69dc070d..7df8cd0375a0 100644 --- a/templates/ansible/facts.erb +++ b/templates/ansible/facts.erb @@ -25,28 +25,23 @@ ANSIBLE_METADATA = {'metadata_version': <%= metadata_version -%>, DOCUMENTATION = ''' --- -module: <%= module_name(object) %>_facts -description: - - Gather facts for GCP <%= object.name %> -short_description: Gather facts for GCP <%= object.name %> -version_added: <%= lines(@config.manifest.get('version_added', object)) -%> -author: <%= lines(@config.manifest.get('author', object)) -%> -requirements: -<% @config.manifest.get('requirements', object).each do |line| -%> -<%= lines(indent(bullet_line(line, 4, false, false), 4)) -%> -<% end -%> -<% if object.facts.has_filters || !uri_props.empty? -%> -options: -<% end -%> -<% if object.facts.has_filters -%> - <%= object.facts.filter.name.underscore -%>: - description: -<%= lines(indent(wrap_field(object.facts.filter.description, 11), 11)) -%> -<% end -%> -<% uri_props.each do |prop| -%> -<%= lines(indent(doc_property_yaml(prop, object, 4), 4)) -%> -<% end -%> -extends_documentation_fragment: gcp +<%= to_yaml({ + 'module' => "#{module_name(object)}_facts", + 'description' => ["Gather facts for GCP #{object.name}"], + 'short_description' => "Gather facts for GCP #{object.name}", + 'version_added' => @config.manifest.get('version_added', object).to_f, + 'author' => @config.manifest.get('author', object), + 'requirements' => @config.manifest.get('requirements', object), + 'options' => [ + ({ + object.facts.filter.name.underscore => { + 'description' => format_description(object.facts.filter.description) + } + } if object.facts.has_filters), + uri_props.map { |p| documentation_for_property(p) } + ].flatten.compact.reduce({}, :merge), + 'extends_documentation_fragment' => 'gcp' +})-%> ''' <% if example and example.facts -%> @@ -56,14 +51,14 @@ EXAMPLES = ''' <% end -%> RETURN = ''' -items: - description: List of items - returned: 
always - type: complex - contains: -<% object.all_user_properties.each do |prop| -%> -<%= lines(indent(return_property_yaml(prop, 8), 8)) -%> -<% end -%> +<%= to_yaml({ + 'items' => { + 'description' => 'List of items', + 'returned' => 'always', + 'type' => 'complex', + 'contains' => object.all_user_properties.map { |p| returns_for_property(p) }.reduce({}, :merge) + } +})-%> ''' ################################################################################ diff --git a/templates/ansible/resource.erb b/templates/ansible/resource.erb index eb9da171e57e..bd7c588e9d31 100644 --- a/templates/ansible/resource.erb +++ b/templates/ansible/resource.erb @@ -25,35 +25,31 @@ ANSIBLE_METADATA = {'metadata_version': <%= metadata_version -%>, DOCUMENTATION = ''' --- -module: <%= module_name(object) %> -description: -<%= lines(indent(bullet_lines(object.description, 4), 4)) -%> -short_description: Creates a GCP <%= object.name %> -version_added: <%= lines(@config.manifest.get('version_added', object)) -%> -author: <%= lines(@config.manifest.get('author', object)) -%> -requirements: -<% @config.manifest.get('requirements', object).each do |line| -%> -<%= lines(indent(bullet_line(line, 4, false, false), 4)) -%> -<% end -%> -options: - state: - description: - - Whether the given object should exist in GCP - choices: ['present', 'absent'] - default: 'present' -<% object.all_user_properties.reject(&:output).each do |prop| -%> -<%= lines(indent(doc_property_yaml(prop, object, 4), 4)) -%> -<% end -%> -extends_documentation_fragment: gcp -<% if object.references -%> -notes: -<% if object.references.api -%> - - "API Reference: U(<%= object.references.api -%>)" -<% end -%> -<% object.references.guides.each do |guide, link| -%> - - "<%= guide -%>: U(<%= link -%>)" -<% end # object.references -%> -<% end # if object.references -%> +<%= to_yaml({ + 'module' => module_name(object), + 'description' => format_description(object.description), + 'short_description' => "Creates a GCP 
#{object.name}", + 'version_added' => @config.manifest.get('version_added', object).to_f, + 'author' => @config.manifest.get('author', object), + 'requirements' => @config.manifest.get('requirements', object), + 'options' => [ + { + 'state' => { + 'description' => ['Whether the given object should exist in GCP'], + 'choices' => ['present', 'absent'], + 'default' => 'present' + }, + }, + object.all_user_properties.reject(&:output).map { |p| documentation_for_property(p) } + ].flatten.compact.reduce({}, :merge), + 'extends_documentation_fragment' => 'gcp', + 'notes' => ( + [ + ("API Reference: U(#{object.references.api})" if object.references.api), + object.references.guides.map { |guide, link| "#{guide}: U(#{link})" } + ].flatten.compact if object.references + ) +})-%> ''' <% if example -%> @@ -69,9 +65,7 @@ EXAMPLES = ''' <% end -%> RETURN = ''' -<% object.all_user_properties.each do |prop| -%> -<%= lines(indent(return_property_yaml(prop, 4), 4)) -%> -<% end -%> +<%= to_yaml(object.all_user_properties.map { |p| returns_for_property(p) }.reduce({}, :merge)) -%> ''' ################################################################################ diff --git a/templates/inspec/doc-template.md.erb b/templates/inspec/doc-template.md.erb new file mode 100644 index 000000000000..6eec8266b412 --- /dev/null +++ b/templates/inspec/doc-template.md.erb @@ -0,0 +1,19 @@ +<% autogen_exception -%> + +--- +title: About the <%= object.name -%> resource +platform: gcp +--- + +## Syntax +A `<%= resource_name(object, product_ns) -%>` is used to test a Google <%= object.name -%> resource + +TODO: Examples + +## Properties +Properties that can be accessed from the `<%= resource_name(object, product_ns) -%>` resource: +<% object.properties.each do |prop| -%> + * `<%= "#{prop.out_name}" -%>`: <%= "#{prop.description}" -%> + +<%= sub_property_descriptions(prop) -%> +<% end -%> \ No newline at end of file diff --git a/templates/inspec/nested_object.erb b/templates/inspec/nested_object.erb 
new file mode 100644 index 000000000000..9a20dd7bb7b1 --- /dev/null +++ b/templates/inspec/nested_object.erb @@ -0,0 +1,66 @@ +<%# The license inside this block applies to this file. +# Copyright 2017 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +-%> +<%= compile('templates/license.erb') -%> + +<%= lines(autogen_notice :ruby) -%> + +<% + requires = generate_requires(nested_properties) +-%> +<%= lines(emit_requires(requires)) -%> +module Google + module <%= product_ns %> + module Property + class <%= class_name -%> + +<% if !nested_properties.empty? -%> +<% nested_properties.each do |prop| -%> + attr_reader :<%= prop.out_name %> +<% end # nested_properties.each -%> + +<% end # if !nested_properties.empty? -%> + + def initialize(args = nil) + return nil if args.nil? +<% nested_properties.each do |prop| -%> +<% + if time?(prop) + init = "Time.new(args['#{prop.api_name}'])" + elsif primitive?(prop) + init = "args['#{prop.api_name}']" + elsif typed_array?(prop) + init = "#{prop.property_type}.parse(args['#{prop.api_name}'])" + else + init = "#{prop.property_type}.new(args['#{prop.api_name}'])" + end + parse_code = "@#{prop.out_name} = #{init}" +-%> +<%= lines(indent(parse_code, 10)) -%> +<% end # nested_properties.each -%> + end + end + +<% if emit_array -%> + class <%= class_name %>Array + def self.parse(value) + return if value.nil? 
+ return <%= class_name %>.new(value) unless value.is_a?(::Array) + value.map { |v| <%= class_name %>.new(v) } + end + end +<% end #if emit_array -%> + end + end +end \ No newline at end of file diff --git a/templates/inspec/plural_resource.erb b/templates/inspec/plural_resource.erb index 3628efec92c1..f56ea78f27ae 100644 --- a/templates/inspec/plural_resource.erb +++ b/templates/inspec/plural_resource.erb @@ -14,4 +14,65 @@ -%> <%= compile 'templates/license.erb' -%> -<%= lines(autogen_notice :ruby) -%> \ No newline at end of file +<%= lines(autogen_notice :ruby) -%> + +class <%= object.name -%>s < Inspec.resource(1) + +<% +name = "google_#{product_ns.downcase}_#{object.name.underscore}" +-%> + name '<%= name.pluralize -%>' + desc '<%= object.name -%> plural resource' + supports platform: 'gcp2' + + filter_table_config = FilterTable.create +<% object.all_user_properties.each do |prop| -%> + + <%= "filter_table_config.add(:#{prop.out_name.pluralize}, field: :#{prop.out_name})" -%> +<% end # object.all_user_properties.each do -%> + + + filter_table_config.connect(self, :fetch_data) + +<% +base = "'#{object.self_link_url[0].join}'" +-%> + def base + <%= base %> + end + + def url + '<%= format_url(object.base_url) %>' + end + + def initialize(params = {}) + @params = params + end + + def fetch_resource(params) + get_request = inspec.backend.fetch(base, url, params) + end + +<% +link_query = object.self_link_query || object.collection_url_response +-%> + def fetch_data + @data = fetch_wrapped_resource(<%= "'#{link_query.kind}'" -%>, <%= "'#{link_query.items}'" -%>) + end + + def fetch_wrapped_resource(wrap_kind, wrap_path) + result = fetch_resource(@params) + return if result.nil? 
|| !result.key?(wrap_path) + + # Conversion of string -> object hash to symbol -> object hash that InSpec needs + converted = [] + result[wrap_path].each do |hash| + hash_with_symbols = {} + hash.each_pair { |k, v| hash_with_symbols[k.to_sym] = v } + converted.push(hash_with_symbols) + end + + converted + end + +end \ No newline at end of file diff --git a/templates/inspec/singular_resource.erb b/templates/inspec/singular_resource.erb index 0858313284a5..d5e59709f0ae 100644 --- a/templates/inspec/singular_resource.erb +++ b/templates/inspec/singular_resource.erb @@ -16,12 +16,23 @@ <%= lines(autogen_notice :ruby) -%> +<% + require 'google/string_utils' + + inside_indent = 8 + + requires = generate_requires(object.all_user_properties) + requires << 'inspec/resource' + requires << 'google/hash_utils' +-%> +<%= lines(emit_requires(requires)) -%> + # A provider to manage <%= @api.name -%> resources. class <%= object.name -%> < Inspec.resource(1) - name 'google_<%= product_ns.downcase -%>_<%= object.name.downcase -%>' + name '<%= resource_name(object, product_ns) -%>' desc '<%= object.name -%>' - supports platform: 'gcp-mm' + supports platform: 'gcp2' <% object.properties.each do |prop| -%> <%= "attr_reader :#{prop.out_name}" -%> @@ -32,13 +43,63 @@ class <%= object.name -%> < Inspec.resource(1) end def url - '<%= url(object) %>' + '<%= format_url(object.self_link_url[1]) %>' + end + +<% if object.self_link_query.nil? -%> + def initialize(params) + @fetched = fetch_resource(params) + parse unless @fetched.nil? + end +<% else # object.self_link_query.nil? -%> + # TODO(slevenick) for other products + def initialize(params) + raise 'Not implemented' + end +<% end # object.self_link_query.nil? -%> + + def fetch_resource(params) + get_request = inspec.backend.fetch(base, url, params) end - # TODO - def parse end +<% if object.self_link_query.nil? -%> + def initialize(params) + @fetched = fetch_resource(params) + parse unless @fetched.nil? 
+ end +<% else # object.self_link_query.nil? -%> + # TODO(slevenick) for other products + def initialize(params) + raise 'Not implemented' + end +<% end # object.self_link_query.nil? -%> + + def fetch_resource(params) + get_request = inspec.backend.fetch(base, url, params) + end + + def parse +<% + parse_code = object.properties.map do |prop| + name = prop.out_name + + if time?(prop) + init = "Time.new(@fetched['#{prop.api_name}'])" + elsif primitive?(prop) + init = "@fetched['#{prop.api_name}']" + elsif typed_array?(prop) + init = "#{prop.property_type}.parse(@fetched['#{prop.api_name}'])" + else + init = "#{prop.property_type}.new(@fetched['#{prop.api_name}'])" + end + + assignment = "@#{name} = #{init}" + end +-%> +<%= lines(indent(parse_code, 4)) -%> + end def exists? !@fetched.nil? end -end \ No newline at end of file +end diff --git a/templates/puppet/resource.erb b/templates/puppet/resource.erb index c831e8f8caa9..ecce6b3df1cc 100644 --- a/templates/puppet/resource.erb +++ b/templates/puppet/resource.erb @@ -42,7 +42,7 @@ -%> <%= lines(emit_requires(requires)) -%> -<% Google::LOGGER.info "Generating #{object.name}: #{object.out_name}" -%> +<% Google::LOGGER.debug "Generating #{object.name}: #{object.out_name}" -%> Puppet::Type.type(:<%= object.out_name -%>).provide(:google) do mk_resource_methods diff --git a/templates/puppet/type.erb b/templates/puppet/type.erb index 6562ee68af8f..fe21a01ed614 100644 --- a/templates/puppet/type.erb +++ b/templates/puppet/type.erb @@ -25,7 +25,7 @@ -%> <%= lines(emit_requires(requires)) -%> -<% Google::LOGGER.info "Generating #{object.name}: #{object.out_name}" -%> +<% Google::LOGGER.debug "Generating #{object.name}: #{object.out_name}" -%> Puppet::Type.newtype(:<%= object.out_name -%>) do <%= format_description(object, 2, '@doc =') %> @@ -39,7 +39,7 @@ Puppet::Type.newtype(:<%= object.out_name -%>) do unless object.parameters.nil? 
object.parameters.each do |param| if param.class <= Api::Type::ResourceRef - Google::LOGGER.info \ + Google::LOGGER.debug \ "Generating autorequire #{object.name}.#{param.name}: #{param.type}" -%> autorequire(:<%= param.out_type -%>) do @@ -103,7 +103,7 @@ Puppet::Type.newtype(:<%= object.out_name -%>) do <% unless object.parameters.nil? -%> <% object.parameters.each do |p| -%> -<% Google::LOGGER.info "Generating param #{object.name}.#{p.name}:#{p.type}" -%> +<% Google::LOGGER.debug "Generating param #{object.name}.#{p.name}:#{p.type}" -%> <%= namevar = identities.include?(p.name) ? 'namevar: true' : nil @@ -132,7 +132,7 @@ Puppet::Type.newtype(:<%= object.out_name -%>) do <% end -%> <% end -%> <% object.properties.each do |p| - Google::LOGGER.info "Generating #{object.name}.#{p.name}: #{p.type}" + Google::LOGGER.debug "Generating #{object.name}.#{p.name}: #{p.type}" -%> <%= diff --git a/templates/terraform/custom_expand/resourceref_as_string.go.erb b/templates/terraform/custom_expand/resourceref_as_string.go.erb new file mode 100644 index 000000000000..34e8b9099530 --- /dev/null +++ b/templates/terraform/custom_expand/resourceref_as_string.go.erb @@ -0,0 +1,17 @@ +<%# The license inside this block applies to this file. + # Copyright 2018 Google Inc. + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. 
+-%> +func expand<%= prefix -%><%= titlelize_property(property) -%>(v interface{}, d *schema.ResourceData, config *Config) (interface{}, error) { + return v, nil +} diff --git a/templates/terraform/decoders/disk.erb b/templates/terraform/decoders/disk.erb index eee89387bc96..8bc1209f49d3 100644 --- a/templates/terraform/decoders/disk.erb +++ b/templates/terraform/decoders/disk.erb @@ -4,6 +4,11 @@ if v, ok := res["diskEncryptionKey"]; ok { // The raw key won't be returned, so we need to use the original. transformed["rawKey"] = d.Get("disk_encryption_key.0.raw_key") transformed["sha256"] = original["sha256"] + <% unless version.nil? || version == 'ga' %> + // The response for crypto keys often includes the version of the key which needs to be removed + // format: projects//locations//keyRings//cryptoKeys//cryptoKeyVersions/1 + transformed["kmsKeyName"] = strings.Split(original["kmsKeyName"].(string), "/cryptoKeyVersions")[0] + <% end %> res["diskEncryptionKey"] = transformed } @@ -13,6 +18,11 @@ if v, ok := res["sourceImageEncryptionKey"]; ok { // The raw key won't be returned, so we need to use the original. transformed["rawKey"] = d.Get("source_image_encryption_key.0.raw_key") transformed["sha256"] = original["sha256"] + <% unless version.nil? || version == 'ga' %> + // The response for crypto keys often includes the version of the key which needs to be removed + // format: projects//locations//keyRings//cryptoKeys//cryptoKeyVersions/1 + transformed["kmsKeyName"] = strings.Split(original["kmsKeyName"].(string), "/cryptoKeyVersions")[0] + <% end %> res["sourceImageEncryptionKey"] = transformed } @@ -22,6 +32,11 @@ if v, ok := res["sourceSnapshotEncryptionKey"]; ok { // The raw key won't be returned, so we need to use the original. transformed["rawKey"] = d.Get("source_snapshot_encryption_key.0.raw_key") transformed["sha256"] = original["sha256"] + <% unless version.nil? 
|| version == 'ga' %> + // The response for crypto keys often includes the version of the key which needs to be removed + // format: projects//locations//keyRings//cryptoKeys//cryptoKeyVersions/1 + transformed["kmsKeyName"] = strings.Split(original["kmsKeyName"].(string), "/cryptoKeyVersions")[0] + <% end %> res["sourceSnapshotEncryptionKey"] = transformed } diff --git a/templates/terraform/examples/autoscaler_basic.tf.erb b/templates/terraform/examples/autoscaler_basic.tf.erb index 3ffd7c0f3bf0..ff6594e2d55c 100644 --- a/templates/terraform/examples/autoscaler_basic.tf.erb +++ b/templates/terraform/examples/autoscaler_basic.tf.erb @@ -46,7 +46,14 @@ resource "google_compute_instance_group_manager" "foobar" { name = "<%= ctx[:vars]['igm_name'] %>" zone = "us-central1-f" +<% if ctx[:version].nil? || ctx[:version] == 'ga' -%> instance_template = "${google_compute_instance_template.foobar.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.foobar.self_link}" + name = "primary" + } +<% end -%> target_pools = ["${google_compute_target_pool.foobar.self_link}"] base_instance_name = "foobar" } diff --git a/templates/terraform/examples/region_autoscaler_basic.tf.erb b/templates/terraform/examples/region_autoscaler_basic.tf.erb index 0373c85fa2fc..5001e689172b 100644 --- a/templates/terraform/examples/region_autoscaler_basic.tf.erb +++ b/templates/terraform/examples/region_autoscaler_basic.tf.erb @@ -46,7 +46,14 @@ resource "google_compute_region_instance_group_manager" "foobar" { name = "<%= ctx[:vars]['rigm_name'] %>" region = "us-central1" +<% if ctx[:version].nil? 
|| ctx[:version] == 'ga' -%> instance_template = "${google_compute_instance_template.foobar.self_link}" +<% else -%> + version { + instance_template = "${google_compute_instance_template.foobar.self_link}" + name = "primary" + } +<% end -%> target_pools = ["${google_compute_target_pool.foobar.self_link}"] base_instance_name = "foobar" } diff --git a/templates/terraform/examples/storage_object_access_control_public_object.tf.erb b/templates/terraform/examples/storage_object_access_control_public_object.tf.erb new file mode 100644 index 000000000000..84179bd9c937 --- /dev/null +++ b/templates/terraform/examples/storage_object_access_control_public_object.tf.erb @@ -0,0 +1,16 @@ +resource "google_storage_object_access_control" "<%= ctx[:primary_resource_id] %>" { + object = "${google_storage_bucket_object.object.name}" + bucket = "${google_storage_bucket.bucket.name}" + role = "READER" + entity = "allUsers" +} + +resource "google_storage_bucket" "bucket" { + name = "<%= ctx[:vars]['bucket_name'] %>" +} + + resource "google_storage_bucket_object" "object" { + name = "<%= ctx[:vars]['object_name'] %>" + bucket = "${google_storage_bucket.bucket.name}" + source = "../static/img/header-logo.jpg" +} diff --git a/templates/terraform/resource.erb b/templates/terraform/resource.erb index 674c6a6b677d..132616118d60 100644 --- a/templates/terraform/resource.erb +++ b/templates/terraform/resource.erb @@ -51,11 +51,6 @@ func resource<%= resource_name -%>() *schema.Resource { Importer: &schema.ResourceImporter{ State: resource<%= resource_name -%>Import, }, -<% if object.min_version.name == 'beta' -%> - DeprecationMessage: `This resource is in beta and will be removed from this provider. -Use the <%= resource_name -%> resource in the terraform-provider-google-beta provider to continue using it. -See https://terraform.io/docs/providers/google/provider_versions.html for more details on beta resources.`, -<% end -%> <% unless object.async.nil? 
-%> Timeouts: &schema.ResourceTimeout { diff --git a/templates/terraform/resource.html.markdown.erb b/templates/terraform/resource.html.markdown.erb index dcbbed33dcbb..5c92f49f555c 100644 --- a/templates/terraform/resource.html.markdown.erb +++ b/templates/terraform/resource.html.markdown.erb @@ -77,11 +77,19 @@ To get more information about <%= object.name -%>, see: <% end -%> <% unless object.example.empty? -%> -## Example Usage + <%- object.example.each do |example| -%> + <%- unless example.skip_test -%> + + <%- end -%> +## Example Usage - <%= example.name.camelize(:upper).uncombine %> + -<% object.example.each do |example| -%> <%= example.config_documentation -%> -<%- end %> + <%- end %> <%- end -%> ## Argument Reference