green-daybreak-98726
07/30/2021, 10:34 PMlittle-whale-73288
08/04/2021, 7:56 AMbumpy-laptop-30846
08/04/2021, 1:43 PMancient-eve-13947
08/04/2021, 4:06 PMfunction uploadBinaries() {
const storageAccount = new storage.StorageAccount("deployments", {
resourceGroupName: resourceGroupName,
kind: storage.Kind.StorageV2,
sku: {
name: storage.SkuName.Standard_LRS,
},
});
const storageContainer = new storage.BlobContainer("binaries", {
resourceGroupName: resourceGroupName,
accountName: storageAccount.name,
publicAccess: storage.PublicAccess.None,
});
const blobs= filesystem.readdirSync(cfg.require("PackagePath")).map(archive => new storage.Blob(archive, {
resourceGroupName: resourceGroupName,
accountName: storageAccount.name,
containerName: storageContainer.name,
source: new pulumi.asset.FileAsset(archive)
}));
const urls= pulumi.all(blobs).apply(bs => bs.map(b => getUrl(b)));
function getUrl(blob: storage.Blob) : pulumi.Output<string> {
return pulumi.all(
[storageAccount.name, storageContainer.name, resourceGroupName, blob.name, blob.id]).apply(
([accountName, containerName, rgName, blobName]) => getSASToken(accountName, containerName, rgName, blobName));
}
function getSASToken(storageAccountName: string, storageContainerName: string, resourceGroupName: string, blobName: string): pulumi.Output<string> {
const blobSAS = storage.listStorageAccountServiceSAS({
accountName: storageAccountName,
protocols: storage.HttpProtocol.Https,
sharedAccessStartTime: format(new Date()),
sharedAccessExpiryTime: format(nextYear()),
resource: storage.SignedResource.C,
resourceGroupName: resourceGroupName,
permissions: storage.Permissions.R,
canonicalizedResource: "/blob/" + storageAccountName + "/" + storageContainerName,
contentType: "application/json",
cacheControl: "max-age=5",
contentDisposition: "inline",
contentEncoding: "deflate",
});
return pulumi.interpolate `https://${storageAccountName}.<http://blob.core.windows.net/${storageContainerName}/${blobName}?${blobSAS.then(x|blob.core.windows.net/${storageContainerName}/${blobName}?${blobSAS.then(x> => x.serviceSasToken)}`;
function format(when: Date){
const year= when.getFullYear();
const month= 1+when.getMonth();
const day= 1+when.getDay();
return `${year}-${pad(month)}-${pad(day)}`;
function pad(n: number){
return n.toString().padStart(2, '0');
}
}
function nextYear():Date {
const result= new Date();
result.setFullYear(result.getFullYear()+1);
return result;
}
}
}
the problem is that urls
is now of type Output<Output<string>[]>
instead of Output<string[]>
. How do I get there?
related to that, I will actually need a mapping of archive=>sas-url. This I would create once I got the flattened output, but I thought to mention it in case it has any bearing.busy-journalist-6936
08/05/2021, 12:09 AMbillions-xylophone-85957
08/05/2021, 2:18 PMpreview
renders it?careful-beard-19872
08/09/2021, 5:19 PMlemon-vase-32091
08/10/2021, 7:39 PMk8s.helm.v3.Chart
with a subchart (kube-prometheus-stack in particular).
I am struggling to pass values to a subchart (kube-state-metrics) that is named in kebab-case.
As TS wouldn't accept this as a key I tried to pass it quoted, but this doesn't seem to work
Is there a magic syntax I am missing here? Thanks!
'kube-state-metrics': {
collectors: {
certificatesigningrequests: false,
mutatingwebhookconfigurations: false,
},
}
broad-gold-44713
08/11/2021, 4:43 PMaws.lambda.CallbackFunction
on windows packages up the files with \
as the directory separator, and that doesn't go down well with AWS Lambda. Is there a way to change the path separator?broad-gold-44713
08/12/2021, 1:38 AMaws.lambda.CallbackFunction
I have a bit of code that uses a decorator, but the default transpiler can't handle it. Is there a way to use tsc
or babel
to do the transpiling correctly?many-salesmen-89069
08/12/2021, 7:25 AMrefined-terabyte-65361
08/12/2021, 8:04 PMconst accessLogsBucket = new aws.s3.Bucket(`${project}-access-logs`);
const BucketPublicAccessBlock = new aws.s3.BucketPublicAccessBlock("BucketPublicAccessBlock", {
bucket: accessLogsBucket.id,
blockPublicAcls: true,
blockPublicPolicy: true,
});
the bucket gets created, but a random suffix gets appended to its name, like this
dev-access-logs-cf9291d
how to stop getting that id for bucket nameancient-monkey-64322
08/13/2021, 12:09 PMLocalWorkspace.removeStack
?
The typescript compiler thinks Property 'removeStack' does not exist on type 'typeof LocalWorkspace'.ts(2339)
and when I run my automation I’m hitting the runtime error TypeError: automation_1.LocalWorkspace.removeStack is not a function
refined-terabyte-65361
08/16/2021, 4:41 PMCalling [toString] on an [Output<T>] is not supported.
To get the value of an Output&lt;T&gt; as an Output&lt;string&gt;, consider
code :
const accessLogsBucket = new aws.s3.Bucket(dev-access-logs,{bucket:dev-access-logs});
const accessbucketPolicy = new aws.s3.BucketPolicy(`${accessLogsBucket.id}`, {
bucket: accessLogsBucket.bucket,
policy: accessLogsBucket.bucket.apply(accessLogsBucketPolicy)
})
purple-beach-36424
08/16/2021, 7:06 PMerror: 1 error occurred:
* error configuring Terraform AWS Provider: no valid credential sources for Terraform AWS Provider found.
Please see <https://registry.terraform.io/providers/hashicorp/aws>
for more information about providing credentials.
Error: NoCredentialProviders: no valid providers in chain. Deprecated.
For verbose messaging see aws.Config.CredentialsChainVerboseErrors
Some notes:
1. The NoCredentialProviders
part is impossible since the state is pushed into an s3 bucket in the same account and everything works fine (was able to run pulumi login
& pulumi stack select
prior to the pulumi preview
I’m currently trying to run)
2. The credentials are provided by a role annotation on a k8s runner who has access to the aws account
3. Tried downgrading AND upgrading libs (@pulumi/pulumi
, @pulumi/aws
& @pulumi/awsx
) because of known past issues with aws providers
4. I tried configuring the provider the most explicit way possible and it still doesn’t work
// Most-explicit provider configuration attempt: every skip* shortcut is
// disabled so the provider must fully validate credentials/metadata.
// (Still produced the NoCredentialProviders error described above.)
new aws.Provider(name, {
skipCredentialsValidation: false,
skipMetadataApiCheck: false,
skipGetEc2Platforms: false,
skipRequestingAccountId: false,
skipRegionValidation: false,
region: this.region,
});
I don’t know where to look next to understand why this doesn’t work as expected.
Thank you in advance for any feedback.rich-cat-16319
08/17/2021, 5:34 AM"$ref": "/aws/v4.0.0/schema.json#/resources/aws:s3%2Fbucket:Bucket"
that is in the schema.json
Does anyone know where can I find the same reference but for the azure-native:web:WebApp
?dazzling-island-38975
08/17/2021, 7:14 AMancient-eve-13947
08/17/2021, 11:35 AMancient-eve-13947
08/17/2021, 12:33 PMancient-eve-13947
08/17/2021, 5:57 PMazure.types.input.web.NameValuePairArgs[]
to pulumi.Input<{
[key: string]: pulumi.Input<string>;
}>
?refined-terabyte-65361
08/18/2021, 4:41 AMerror: 1 error occurred:
* Error putting S3 policy: OperationAborted: A conflicting conditional operation is currently in progress against this resource. Please try again.
status code: 409, request id: 06R8HNWN81YD21RA, host id: ALhssd0pVm6CT6jM9ge68cfGfY9yOmhSU/CTsogFNKxS4DyFsDgQj6yhcMO=
ancient-eve-13947
08/18/2021, 10:37 AMnvp.Name
(OutputInstance<string>
, Promise<string>,
undefined
) "cannot be used as an index type".ancient-eve-13947
08/18/2021, 1:04 PMkubernetes.apps.v1.Deployment
comes from an Output<string>
, how can I do that? it only accepts a string
, not an Output<string>
. I found this, but am unclear how to use it.refined-terabyte-65361
08/18/2021, 5:47 PMimport * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
import * as awsx from "@pulumi/awsx";
// Create an AWS resource (S3 Bucket)
const bucketName = "cent-serv-logs";
const accessLogsBucket = new aws.s3.Bucket(`${bucketName}`, {
bucket: `${bucketName}`,
});
const accessBucketPublicAccessBlock = new aws.s3.BucketPublicAccessBlock(
"accessBucketPublicAccessBlock",
{
bucket: accessLogsBucket.id,
blockPublicAcls: true,
blockPublicPolicy: true,
ignorePublicAcls: true,
restrictPublicBuckets: true,
}
);
const accessbucketPolicy = new aws.s3.BucketPolicy(
`central-server-access-logs-policy`,
{
bucket: accessLogsBucket.bucket,
policy: accessLogsBucket.bucket.apply(accessLogsBucketPolicy),
}
);
// Returns, as a JSON string, the S3 bucket policy that lets AWS log delivery
// write access logs into `bucketName`.
//
// FIX: the chat export had mangled the log-delivery service principal into
// Slack link markup ("<http://delivery.logs.amazonaws.com|...>"), which is not
// a valid principal; restored to "delivery.logs.amazonaws.com".
function accessLogsBucketPolicy(bucketName: string): string {
  return JSON.stringify({
    Version: "2012-10-17",
    Id: "AWSConsole-AccessLogs-Policy-16287xxxx",
    Statement: [
      {
        // Account root may write log objects anywhere in the bucket.
        Sid: "AWSConsoleStmt-16xxxx",
        Effect: "Allow",
        Principal: {
          AWS: "arn:aws:iam::79787xxx:root",
        },
        Action: "s3:PutObject",
        Resource: `arn:aws:s3:::${bucketName}/*`,
      },
      {
        // Log delivery may write objects, but only with the
        // bucket-owner-full-control canned ACL.
        Sid: "AWSLogDeliveryWrite",
        Effect: "Allow",
        Principal: {
          Service: "delivery.logs.amazonaws.com",
        },
        Action: "s3:PutObject",
        Resource: `arn:aws:s3:::${bucketName}/*`,
        Condition: {
          StringEquals: {
            "s3:x-amz-acl": "bucket-owner-full-control",
          },
        },
      },
      {
        // Log delivery must be able to read the bucket ACL before writing.
        Sid: "AWSLogDeliveryAclCheck",
        Effect: "Allow",
        Principal: {
          Service: "delivery.logs.amazonaws.com",
        },
        Action: "s3:GetBucketAcl",
        Resource: `arn:aws:s3:::${bucketName}`,
      },
    ],
  });
}
error:
error: 1 error occurred:
* Error putting S3 policy: OperationAborted: A conflicting conditional operation is currently in progress against this resource. Please try again.
status code: 409,
used example from here
https://www.pulumi.com/docs/aws/s3/#create-an-aws-s3-resource-using-pulumiawsprehistoric-kite-30979
08/24/2021, 12:07 PMaws.lambda.CallbackFunction
?many-salesmen-89069
08/25/2021, 7:57 AMastonishing-tiger-81216
08/26/2021, 1:06 PMexport interface SecurityGroupRuleLocation {
cidrBlocks?: pulumi.Input<pulumi.Input<string>[]>;
ipv6CidrBlocks?: pulumi.Input<pulumi.Input<string>[]>;
sourceSecurityGroupId?: pulumi.Input<string>;
}
awsx.ec2.SecurityGroupRule.ingress(
'instance-sg',
InstanceSg,
InstanceSg.id, // Type 'Output ' has no properties in common with type 'SecurityGroupRuleLocation'.
new awsx.ec2.TcpPorts(8080),
)
refined-terabyte-65361
08/26/2021, 8:59 PMred-account-16888
08/27/2021, 3:29 PMrefined-terabyte-65361
09/02/2021, 7:20 PMrefined-terabyte-65361
09/02/2021, 7:20 PMgreen-stone-37839
09/02/2021, 7:24 PMlittle-cartoon-10569
09/02/2021, 8:12 PMget()
that will load an unmanaged resource object.