
feat: enable pulling from multiple buckets (#14)
robinmetral authored Mar 4, 2020
1 parent 0c946a4 commit 9638294
Showing 2 changed files with 62 additions and 27 deletions.
README.md: 23 additions & 3 deletions
````diff
@@ -27,20 +27,40 @@ require("dotenv").config({
 module.exports = {
   plugins: [
     {
-      resolve: "gatsby-source-s3",
+      resolve: `@robinmetral/gatsby-source-s3`,
       options: {
         aws: {
           accessKeyId: process.env.AWS_ACCESS_KEY_ID,
           secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
-          region: "eu-central-1"
+          region: process.env.AWS_REGION
         },
-        buckets: ["my-bucket-name", "other-bucket"]
+        buckets: ["my-bucket", "my-second-bucket"]
       }
     }
   ]
 };
 ```
 
+Currently, your buckets will need to be configured for public access with this
+access policy (add your bucket name under `Statement.Resource`):
+
+```json
+{
+  "Version": "2008-10-17",
+  "Statement": [
+    {
+      "Sid": "AllowPublicRead",
+      "Effect": "Allow",
+      "Principal": {
+        "AWS": "*"
+      },
+      "Action": "s3:GetObject",
+      "Resource": "arn:aws:s3:::my-bucket/*"
+    }
+  ]
+}
+```
+
 ## Query
 
 S3 objects can be queried in GraphQL as "s3Object" or "allS3Object":
````
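The policy in the README addition above is plain S3 bucket configuration, not something the plugin sets up. As a rough sketch (not part of this commit; the bucket name and credentials are placeholders), it could be applied with the same aws-sdk v2 client the plugin already uses:

```ts
// Sketch: apply the public-read policy from the README with aws-sdk v2.
// Assumes AWS credentials are available via the usual environment variables.
import AWS from "aws-sdk";

const s3 = new AWS.S3({ region: process.env.AWS_REGION });

const policy = {
  Version: "2008-10-17",
  Statement: [
    {
      Sid: "AllowPublicRead",
      Effect: "Allow",
      Principal: { AWS: "*" },
      Action: "s3:GetObject",
      // replace with your bucket name, as in the README
      Resource: "arn:aws:s3:::my-bucket/*"
    }
  ]
};

s3.putBucketPolicy({ Bucket: "my-bucket", Policy: JSON.stringify(policy) })
  .promise()
  .then(() => console.log("public-read policy applied"))
  .catch(console.error);
```

Public reads are what let the plugin build unsigned `https://s3.<region>.amazonaws.com/<bucket>/<key>` URLs in `gatsby-node.ts` below.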
src/gatsby-node.ts: 39 additions & 24 deletions
```diff
@@ -13,6 +13,8 @@ type pluginOptionsType = {
   buckets: string[];
 };
 
+type ObjectType = AWS.S3.Object & { Bucket: string };
+
 // source all objects from s3
 export async function sourceNodes(
   { actions: { createNode }, createNodeId, createContentDigest, reporter },
@@ -26,42 +28,55 @@ export async function sourceNodes(
   // get objects
   const s3 = new AWS.S3();
 
-  try {
+  const listObjects = async bucket => {
     // todo improve this call
     // see https://stackoverflow.com/a/49888947
     const response = await s3
       .listObjectsV2({
-        // todo handle several buckets
-        Bucket: buckets[0]
+        Bucket: bucket
         // todo handle continuation token
         // ContinuationToken: token,
       })
       .promise();
 
+    // add bucket key
+    const objects = response.Contents?.reduce((acc: ObjectType[], cur) => {
+      const object: ObjectType = { ...cur, Bucket: bucket };
+      acc.push(object);
+      return acc;
+    }, []);
+
+    return objects;
+  };
+
+  try {
+    const objects = await Promise.all(
+      buckets.map(bucket => listObjects(bucket))
+    );
+
     // create file nodes
     // todo touch nodes if they exist already
-    response.Contents &&
-      response.Contents.forEach(async object => {
-        const { Key } = object;
-        const { region } = awsConfig;
-        const node = {
-          // node meta
-          id: createNodeId(`s3-object-${object.Key}`),
-          parent: null,
-          children: [],
-          internal: {
-            type: "S3Object",
-            content: JSON.stringify(object),
-            contentDigest: createContentDigest(object)
-          },
-          // s3 object data
-          Url: `https://s3.${region ? `${region}.` : ""}amazonaws.com/${
-            buckets[0]
-          }/${Key}`,
-          ...object
-        };
-        createNode(node);
+    objects?.flat().forEach(async object => {
+      const { Key, Bucket } = object;
+      const { region } = awsConfig;
+
+      createNode({
+        ...object,
+        // construct url
+        Url: `https://s3.${
+          region ? `${region}.` : ""
+        }amazonaws.com/${Bucket}/${Key}`,
+        // node meta
+        id: createNodeId(`s3-object-${Key}`),
+        parent: null,
+        children: [],
+        internal: {
+          type: "S3Object",
+          content: JSON.stringify(object),
+          contentDigest: createContentDigest(object)
+        }
+      });
       });
+    });
   } catch (error) {
     reporter.error(error);
   }
```
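The `// todo handle continuation token` comment is still open after this change: `listObjectsV2` returns at most 1,000 keys per request, so larger buckets would only be partially sourced. A possible follow-up, sketched here outside the commit with an illustrative `listAllObjects` name, would loop on `NextContinuationToken`:

```ts
// Sketch: list every object in a bucket by following continuation tokens.
// Mirrors the ObjectType shape introduced in this commit.
import AWS from "aws-sdk";

type ObjectType = AWS.S3.Object & { Bucket: string };

const s3 = new AWS.S3();

const listAllObjects = async (bucket: string): Promise<ObjectType[]> => {
  const objects: ObjectType[] = [];
  let token: string | undefined;

  do {
    const response = await s3
      .listObjectsV2({ Bucket: bucket, ContinuationToken: token })
      .promise();

    // tag each object with its bucket, as listObjects does above
    (response.Contents || []).forEach(object =>
      objects.push({ ...object, Bucket: bucket })
    );

    // NextContinuationToken is only present while the listing is truncated
    token = response.IsTruncated ? response.NextContinuationToken : undefined;
  } while (token);

  return objects;
};
```

Swapping this in for `listObjects` would leave the rest of `sourceNodes` unchanged, since each bucket still resolves to an `ObjectType[]`.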
