purple-plumber-90981
05/20/2021, 3:13 AM
# create cluster resource
# Create the EKS cluster; `provider_opts` and `eks_cluster_config` are defined
# elsewhere in the program (not visible in this excerpt).
eks_cluster = aws.eks.Cluster("itplat-eks-cluster", opts=provider_opts, **eks_cluster_config)
# Kubernetes provider for the new cluster.
# NOTE(review): `cluster` and `context` are set to the cluster ARN but no
# kubeconfig is supplied, so the provider has no way to reach the API server --
# this is the configuration that fails below with "configured Kubernetes
# cluster is unreachable".
k8s_use1_provider = k8s.Provider(
k8s_use1_provider_name,
cluster=eks_cluster.arn,
context=eks_cluster.arn,
enable_dry_run=None,
namespace=None,
render_yaml_to_directory=None,
suppress_deprecation_warnings=None,
)
# lets have a go at creating a "crossplane-system" namespace
# The namespace references the provider, which references eks_cluster, so
# Pulumi orders its creation after the cluster exists.
crossplane_namespace = k8s.core.v1.Namespace(
"crossplane-system", opts=pulumi.ResourceOptions(provider=k8s_use1_provider), metadata=k8s.meta.v1.ObjectMetaArgs(name="crossplane-system")
)
This makes the namespace dependent on the provider, which is dependent on eks_cluster?
billowy-army-68599
purple-plumber-90981
05/20/2021, 6:41 AM
aws eks --region us-east-1 update-kubeconfig --name <my_new_cluster>
error: configured Kubernetes cluster is unreachable: unable to load schema information from the API server
billowy-army-68599
purple-plumber-90981
# Second attempt: same provider, but now with an explicit kubeconfig generated
# from the cluster (per the pulumi/examples aws-py-eks utils.py helper).
# NOTE(review): `cluster`/`context` are still set to the cluster ARN; the
# generated kubeconfig contains no context by that name, so this still fails
# ('context "arn:aws:eks:..." does not exist'). They should not be set here.
# (The leading "05/20/2021, 11:24 PM" on the next line is a chat-export
# timestamp fused onto the code line, not part of the program.)
05/20/2021, 11:24 PMk8s_use1_provider = k8s.Provider(
k8s_use1_provider_name,
cluster=eks_cluster.arn,
context=eks_cluster.arn,
enable_dry_run=None,
kubeconfig=utils.generate_kube_config(eks_cluster),
namespace=None,
render_yaml_to_directory=None,
suppress_deprecation_warnings=None,
)
# lets have a go at creating a "crossplane-system" namespace
crossplane_namespace = k8s.core.v1.Namespace(
"crossplane-system", opts=pulumi.ResourceOptions(provider=k8s_use1_provider), metadata=k8s.meta.v1.ObjectMetaArgs(name="crossplane-system")
)
leaves me with weirdness :-
Diagnostics:
kubernetes:core/v1:Namespace (crossplane-system):
error: connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:52392: connect: connection refused"
pulumi:pulumi:Stack (aws_eks-itplat-aws-eks):
panic: runtime error: invalid memory address or nil pointer dereference
[signal SIGSEGV: segmentation violation code=0x1 addr=0x10 pc=0x291f5c2]
goroutine 19 [running]:
<http://github.com/pulumi/pulumi-kubernetes/provider/v3/pkg/provider.getActiveClusterFromConfig(0xc0004804e0|github.com/pulumi/pulumi-kubernetes/provider/v3/pkg/provider.getActiveClusterFromConfig(0xc0004804e0>, 0xc00047f620, 0xc0000ddd00)
/home/runner/work/pulumi-kubernetes/pulumi-kubernetes/provider/pkg/provider/util.go:118 +0xe2
<http://github.com/pulumi/pulumi-kubernetes/provider/v3/pkg/provider.(*kubeProvider).DiffConfig(0xc00071ac30|github.com/pulumi/pulumi-kubernetes/provider/v3/pkg/provider.(*kubeProvider).DiffConfig(0xc00071ac30>, 0x3105be8, 0xc00047f5c0, 0xc0005fccb0, 0xc00071ac30, 0x2b0b201, 0xc0006eebc0)
/home/runner/work/pulumi-kubernetes/pulumi-kubernetes/provider/pkg/provider/provider.go:345 +0xcb8
<http://github.com/pulumi/pulumi/sdk/v3/proto/go._ResourceProvider_DiffConfig_Handler.func1(0x3105be8|github.com/pulumi/pulumi/sdk/v3/proto/go._ResourceProvider_DiffConfig_Handler.func1(0x3105be8>, 0xc00047f5c0, 0x2cb14e0, 0xc0005fccb0, 0x2cc6a00, 0x41623c8, 0x3105be8, 0xc00047f5c0)
/home/runner/go/pkg/mod/github.com/pulumi/pulumi/sdk/v3@v3.1.0/proto/go/provider.pb.go:2158 +0x89
<http://github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc.OpenTracingServerInterceptor.func1(0x3105be8|github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc.OpenTracingServerInterceptor.func1(0x3105be8>, 0xc00047f140, 0x2cb14e0, 0xc0005fccb0, 0xc000448380, 0xc00096c510, 0x0, 0x0, 0x30bfc20, 0xc00054dbd0)
/home/runner/go/pkg/mod/github.com/grpc-ecosystem/grpc-opentracing@v0.0.0-20180507213350-8e809c8a8645/go/otgrpc/server.go:57 +0x30a
<http://github.com/pulumi/pulumi/sdk/v3/proto/go._ResourceProvider_DiffConfig_Handler(0x2d4d5e0|github.com/pulumi/pulumi/sdk/v3/proto/go._ResourceProvider_DiffConfig_Handler(0x2d4d5e0>, 0xc00071ac30, 0x3105be8, 0xc00047f140, 0xc000480420, 0xc000449280, 0x3105be8, 0xc00047f140, 0xc00093b500, 0x950)
/home/runner/go/pkg/mod/github.com/pulumi/pulumi/sdk/v3@v3.1.0/proto/go/provider.pb.go:2160 +0x150
<http://google.golang.org/grpc.(*Server).processUnaryRPC(0xc0000ff6c0|google.golang.org/grpc.(*Server).processUnaryRPC(0xc0000ff6c0>, 0x3121b58, 0xc00048ac00, 0xc000152000, 0xc00060d8f0, 0x41001f0, 0x0, 0x0, 0x0)
/home/runner/go/pkg/mod/google.golang.org/grpc@v1.34.0/server.go:1210 +0x52b
<http://google.golang.org/grpc.(*Server).handleStream(0xc0000ff6c0|google.golang.org/grpc.(*Server).handleStream(0xc0000ff6c0>, 0x3121b58, 0xc00048ac00, 0xc000152000, 0x0)
/home/runner/go/pkg/mod/google.golang.org/grpc@v1.34.0/server.go:1533 +0xd0c
<http://google.golang.org/grpc.(*Server).serveStreams.func1.2(0xc00079a030|google.golang.org/grpc.(*Server).serveStreams.func1.2(0xc00079a030>, 0xc0000ff6c0, 0x3121b58, 0xc00048ac00, 0xc000152000)
/home/runner/go/pkg/mod/google.golang.org/grpc@v1.34.0/server.go:871 +0xab
created by <http://google.golang.org/grpc.(*Server).serveStreams.func1|google.golang.org/grpc.(*Server).serveStreams.func1>
/home/runner/go/pkg/mod/google.golang.org/grpc@v1.34.0/server.go:869 +0x1fd
billowy-army-68599
purple-plumber-90981
05/20/2021, 11:55 PM
https://github.com/pulumi/examples/blob/master/aws-py-eks/utils.py
error: configured Kubernetes cluster is unreachable: unable to load Kubernetes client configuration from kubeconfig file: context "arn:aws:eks:us-east-1:69999999991:cluster/itplat-eks-cluster" does not exist
billowy-army-68599
purple-plumber-90981
05/21/2021, 1:56 AM
# create cluster resource
eks_cluster = aws.eks.Cluster(
"itplat-eks-cluster",
name="itplat-eks-cluster",
# create k8s providers allowing us to switch clusters/contexts
k8s_use1_provider_name = "k8s_provider_use1"
k8s_use1_provider = k8s.Provider(
k8s_use1_provider_name,
cluster=eks_cluster.arn,
context=eks_cluster.arn,
enable_dry_run=None,
kubeconfig=utils.generate_kube_config(eks_cluster),
namespace=None,
render_yaml_to_directory=None,
suppress_deprecation_warnings=None,
)
billowy-army-68599
context=
propertykubeconfig=
purple-plumber-90981
05/24/2021, 2:35 AM
"""An AWS Python Pulumi program"""
import json  # NOTE(review): unused at the moment; kept so module behavior is unchanged
import pulumi
import pulumi_kubernetes as k8s
import pulumi_aws as aws
import utils
from pulumi import ResourceOptions

# Adopt the pre-existing cluster into the stack instead of creating a new one:
# `import_` tells Pulumi to import the live "itplat-eks-cluster" resource.
eks_cluster = aws.eks.Cluster(
    "test-cluster",
    name="itplat-eks-cluster",
    role_arn="arn:aws:iam::629205377521:role/itplat_eks_clusteradmin_role",
    # NOTE(review): the keys below mix camelCase and snake_case; Pulumi's
    # Python SDK accepts both spellings, but consider normalizing to
    # snake_case (or ClusterVpcConfigArgs) -- verify before changing, since
    # these are runtime dict keys.
    vpc_config={
        "endpointPrivateAccess": True,
        "endpointPublicAccess": False,
        "securityGroupIds": ["sg-08a13f35d34ee1b7f"],
        "subnet_ids": ["subnet-0b962f93a756f624b", "subnet-09cc1903498dc4474", "subnet-0e0ff2e030397c840"],
    },
    opts=ResourceOptions(import_='itplat-eks-cluster'),
)

# Kubernetes provider that reaches the cluster through an explicitly generated
# kubeconfig. Deliberately does NOT set `cluster`/`context` to the cluster ARN:
# that context name does not exist in the generated kubeconfig and made the
# provider fall back to localhost ("dial tcp 127.0.0.1:...: connection refused").
k8s_use1_provider = k8s.Provider(
    "test_provider",
    kubeconfig=utils.generate_kube_config(eks_cluster),
)

# lets have a go at creating a "crossplane-system" namespace
crossplane_namespace = k8s.core.v1.Namespace(
    "crossplane-system",
    # Use the imported `ResourceOptions` consistently (the original mixed
    # `pulumi.ResourceOptions` here with the bare import above).
    opts=ResourceOptions(provider=k8s_use1_provider),
    metadata=k8s.meta.v1.ObjectMetaArgs(name="crossplane-system"),
)
billowy-army-68599
cluster=eks_cluster.arn,
context=eks_cluster.arn,
namespace=None,
render_yaml_to_directory=None,
suppress_deprecation_warnings=None,
and it now works?
purple-plumber-90981
05/24/2021, 2:55 AMbillowy-army-68599
purple-plumber-90981
05/24/2021, 2:57 AM
Diagnostics:
pulumi:pulumi:Stack (aws_eks-itplat-aws-eks):
panic: runtime error: invalid memory address or nil pointer dereference
[signal SIGSEGV: segmentation violation code=0x1 addr=0x10 pc=0x291f382]
goroutine 35 [running]:
<http://github.com/pulumi/pulumi-kubernetes/provider/v3/pkg/provider.getActiveClusterFromConfig(0xc00093eae0|github.com/pulumi/pulumi-kubernetes/provider/v3/pkg/provider.getActiveClusterFromConfig(0xc00093eae0>, 0xc0001d5980, 0x0)
/home/runner/work/pulumi-kubernetes/pulumi-kubernetes/provider/pkg/provider/util.go:118 +0xe2
<http://github.com/pulumi/pulumi-kubernetes/provider/v3/pkg/provider.(*kubeProvider).DiffConfig(0xc000535860|github.com/pulumi/pulumi-kubernetes/provider/v3/pkg/provider.(*kubeProvider).DiffConfig(0xc000535860>, 0x31057e8, 0xc0001d5950, 0xc0009ac850, 0xc000535860, 0x2b0af01, 0xc000616e80)
/home/runner/work/pulumi-kubernetes/pulumi-kubernetes/provider/pkg/provider/provider.go:344 +0xc8d
<http://github.com/pulumi/pulumi/sdk/v3/proto/go._ResourceProvider_DiffConfig_Handler.func1(0x31057e8|github.com/pulumi/pulumi/sdk/v3/proto/go._ResourceProvider_DiffConfig_Handler.func1(0x31057e8>, 0xc0001d5950, 0x2cb11c0, 0xc0009ac850, 0x2cc66e0, 0x4162348, 0x31057e8, 0xc0001d5950)
/home/runner/go/pkg/mod/github.com/pulumi/pulumi/sdk/v3@v3.0.0/proto/go/provider.pb.go:2158 +0x89
<http://github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc.OpenTracingServerInterceptor.func1(0x31057e8|github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc.OpenTracingServerInterceptor.func1(0x31057e8>, 0xc0001d5650, 0x2cb11c0, 0xc0009ac850, 0xc00004c9c0, 0xc000403290, 0x0, 0x0, 0x30bf880, 0xc0004444e0)
/home/runner/go/pkg/mod/github.com/grpc-ecosystem/grpc-opentracing@v0.0.0-20180507213350-8e809c8a8645/go/otgrpc/server.go:57 +0x30a
<http://github.com/pulumi/pulumi/sdk/v3/proto/go._ResourceProvider_DiffConfig_Handler(0x2d4d2c0|github.com/pulumi/pulumi/sdk/v3/proto/go._ResourceProvider_DiffConfig_Handler(0x2d4d2c0>, 0xc000535860, 0x31057e8, 0xc0001d5650, 0xc00093ea80, 0xc00010e040, 0x31057e8, 0xc0001d5650, 0xc0000d0000, 0x1056)
/home/runner/go/pkg/mod/github.com/pulumi/pulumi/sdk/v3@v3.0.0/proto/go/provider.pb.go:2160 +0x150
<http://google.golang.org/grpc.(*Server).processUnaryRPC(0xc00077f6c0|google.golang.org/grpc.(*Server).processUnaryRPC(0xc00077f6c0>, 0x3121758, 0xc000001680, 0xc0009cc100, 0xc00028c030, 0x4100170, 0x0, 0x0, 0x0)
/home/runner/go/pkg/mod/google.golang.org/grpc@v1.34.0/server.go:1210 +0x52b
<http://google.golang.org/grpc.(*Server).handleStream(0xc00077f6c0|google.golang.org/grpc.(*Server).handleStream(0xc00077f6c0>, 0x3121758, 0xc000001680, 0xc0009cc100, 0x0)
/home/runner/go/pkg/mod/google.golang.org/grpc@v1.34.0/server.go:1533 +0xd0c
<http://google.golang.org/grpc.(*Server).serveStreams.func1.2(0xc000601a10|google.golang.org/grpc.(*Server).serveStreams.func1.2(0xc000601a10>, 0xc00077f6c0, 0x3121758, 0xc000001680, 0xc0009cc100)
/home/runner/go/pkg/mod/google.golang.org/grpc@v1.34.0/server.go:871 +0xab
created by <http://google.golang.org/grpc.(*Server).serveStreams.func1|google.golang.org/grpc.(*Server).serveStreams.func1>
/home/runner/go/pkg/mod/google.golang.org/grpc@v1.34.0/server.go:869 +0x1fd
kubernetes:core/v1:Namespace (crossplane-system):
error: connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:52449: connect: connection refused"
billowy-army-68599
pulumi up -r
purple-plumber-90981
05/24/2021, 2:59 AMbillowy-army-68599
purple-plumber-90981
05/24/2021, 3:02 AMbillowy-army-68599
cluster
and context
shouldn't have the ARN set; those are incorrect values.
purple-plumber-90981
05/24/2021, 3:07 AM
kubernetes:core/v1:Namespace (crossplane-system):
error: connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:57683: connect: connection refused"