chilly-photographer-60932
07/05/2019, 7:34 PM│ ├─ pulumi-nodejs:dynamic:Resource example-nodegroup-iam-simple-vpc-cni **creating failed** 1 error
+ │ ├─ kubernetes:core:ConfigMap example-nodegroup-iam-simple-nodeAccess **creating failed** 1 error
Diagnostics:
kubernetes:core:ConfigMap (example-nodegroup-iam-simple-nodeAccess):
error: Plan apply failed: Get <https://922FC00EB908914CAF76044D3533E63A.yl4.us-west-2.eks.amazonaws.com/api?timeout=32s>: net/http: TLS handshake timeout
pulumi:pulumi:Stack (aws-arcus-kitchensink-naveen):
unable to recognize "/var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-12437sPtTDnmfv7lm.tmp": Unauthorized
unable to recognize "/var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-12437sPtTDnmfv7lm.tmp": Unauthorized
unable to recognize "/var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-12437sPtTDnmfv7lm.tmp": Unauthorized
unable to recognize "/var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-12437sPtTDnmfv7lm.tmp": Unauthorized
unable to recognize "/var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-12437sPtTDnmfv7lm.tmp": Unauthorized
error: update failed
pulumi-nodejs:dynamic:Resource (example-nodegroup-iam-simple-vpc-cni):
error: Plan apply failed: Command failed: kubectl apply -f /var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-12437sPtTDnmfv7lm.tmp
unable to recognize "/var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-12437sPtTDnmfv7lm.tmp": Unauthorized
unable to recognize "/var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-12437sPtTDnmfv7lm.tmp": Unauthorized
unable to recognize "/var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-12437sPtTDnmfv7lm.tmp": Unauthorized
unable to recognize "/var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-12437sPtTDnmfv7lm.tmp": Unauthorized
unable to recognize "/var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-12437sPtTDnmfv7lm.tmp": Unauthorized
white-balloon-205
chilly-photographer-60932
07/05/2019, 7:36 PM
import * as eks from '@pulumi/eks';
import { ClusterOpts } from '../config/types/ClusterOpts';
import * as pulumi from '@pulumi/pulumi';
import * as aws from '@pulumi/aws';
import * as iam from './iam'
/**
* KubernetesCluster is a component that wraps the AWK and Kubernetes resources
* necessary to run a Kubernetes cluster
*/
export class KubernetesCluster extends pulumi.ComponentResource {
/**
* A kubeconfig that can be used to connect to the EKS cluster.
*/
public readonly eksCluster: eks.Cluster;
public readonly nodeGroups?: eks.NodeGroup[];
/**
*
* @param clusterOpts Options for creating the cluster
*/
constructor(clusterOpts: ClusterOpts) {
super('arcus:cluster', clusterOpts.name);
// const clusterOpts1: eks.ClusterOptions =
// {
// instanceRole: getRoleAndInstanceProfile.Role,
// ...clusterOpts.options
// }
// ;
const role0 = iam.createRole('example-role0');
const instanceProfile0 = new aws.iam
.InstanceProfile('example-instanceProfile0', {role: role0});
// Create an EKS cluster with a shared IAM instance role to register with the
// cluster auth.
const cluster1 = new eks.Cluster('example-nodegroup-iam-simple', {
skipDefaultNodeGroup: true,
deployDashboard: false,
instanceRole: role0,
});
// There are two approaches that can be used to add additional NodeGroups.
// 1. A `createNodeGroup` API on `eks.Cluster`
// 2. A `NodeGroup` resource which accepts an `eks.Cluster` as input
// Create the node group using an `instanceProfile` tied to the shared, cluster
// instance role registered with the cluster auth through `instanceRole`.
cluster1.createNodeGroup('example-ng-simple-ondemand', {
instanceType: 't2.medium',
desiredCapacity: 1,
minSize: 1,
maxSize: 2,
labels: {'ondemand': 'true'},
instanceProfile: instanceProfile0,
});
// this.eksCluster = new eks.Cluster(clusterOpts.name, clusterOpts1, {
// parent: this
// });
// // Create the second node group with spot t2.medium instance
// const spot = new eks.NodeGroup('example-ng-simple-spot', {
// cluster: this.eksCluster,
// instanceType: 't2.medium',
// desiredCapacity: 1,
// minSize: 1,
// maxSize: 2,
// spotPrice: '1',
// labels: {'preemptible': 'true'},
// taints: {
// 'special': {
// value: 'true',
// effect: 'NoSchedule',
// },
// },
//
// instanceProfile: getRoleAndInstanceProfile.InstanceProfile,
// }, {
// providers: { kubernetes: this.eksCluster.provider},
// });
// if (clusterOpts.nodeGroups !== undefined) {
// this.nodeGroups = [];
// for (const ng of clusterOpts.nodeGroups) {
// const ngOptions: eks.NodeGroupOptions =
// {
// instanceProfile: getRoleAndInstanceProfile.InstanceProfile,
// ...ng.options
// }
// ;
// this.nodeGroups.push(
// this.eksCluster.createNodeGroup(ng.name, ngOptions)
// );
// }
// }
}
}
white-balloon-205
chilly-photographer-60932
07/05/2019, 8:02 PMk apply -f /var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-15096Gbdaqtm0KDHA.tmp
<http://clusterrole.rbac.authorization.k8s.io/aws-node|clusterrole.rbac.authorization.k8s.io/aws-node> unchanged
serviceaccount/aws-node unchanged
<http://clusterrolebinding.rbac.authorization.k8s.io/aws-node|clusterrolebinding.rbac.authorization.k8s.io/aws-node> unchanged
daemonset.apps/aws-node unchanged
<http://customresourcedefinition.apiextensions.k8s.io/eniconfigs.crd.k8s.amazonaws.com|customresourcedefinition.apiextensions.k8s.io/eniconfigs.crd.k8s.amazonaws.com> unchanged
from server for: "/var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-15096Gbdaqtm0KDHA.tmp": Unauthorized)
error: You must be logged in to the server (error when retrieving current configuration of:
Resource: "<http://apiextensions.k8s.io/v1beta1|apiextensions.k8s.io/v1beta1>, Resource=customresourcedefinitions", GroupVersionKind: "<http://apiextensions.k8s.io/v1beta1|apiextensions.k8s.io/v1beta1>, Kind=CustomResourceDefinition"
Name: "<http://eniconfigs.crd.k8s.amazonaws.com|eniconfigs.crd.k8s.amazonaws.com>", Namespace: ""
Object: &{map["apiVersion":"<http://apiextensions.k8s.io/v1beta1|apiextensions.k8s.io/v1beta1>" "kind":"CustomResourceDefinition" "metadata":map["annotations":map["<http://kubectl.kubernetes.io/last-applied-configuration|kubectl.kubernetes.io/last-applied-configuration>":""] "name":"<http://eniconfigs.crd.k8s.amazonaws.com|eniconfigs.crd.k8s.amazonaws.com>"] "spec":map["group":"<http://crd.k8s.amazonaws.com|crd.k8s.amazonaws.com>" "names":map["kind":"ENIConfig" "plural":"eniconfigs" "singular":"eniconfig"] "scope":"Cluster" "versions":[map["name":"v1alpha1" "served":%!q(bool=true) "storage":%!q(bool=true)]]]]}
from server for: "/var/folders/6x/1nwydqkn1kn0n2wqmwwp1hsmvm6qls/T/tmp-15096Gbdaqtm0KDHA.tmp": Unauthorized)
white-balloon-205
kubeconfig
that the Pulumi deployment is trying to use and try to manually use that from outside of Pulumi.chilly-photographer-60932
07/05/2019, 8:05 PMnaveen
. But the local export AWS_DEFAULT_PROFILE=naveen
was not set before. That was causing the error.white-balloon-205
chilly-photographer-60932
07/05/2019, 8:13 PMaws:profile
which was in the yaml
white-balloon-205
the aws:profile which was in the
yaml
chilly-photographer-60932
07/05/2019, 8:16 PM