Draft: Issue #20 Update to use AWS S3 SDK V3 #22

Open · wants to merge 2 commits into master
166 changes: 81 additions & 85 deletions lib/index.js
@@ -1,122 +1,118 @@
"use strict";
const AWS = require('aws-sdk');
const URI = require('urijs');
const crypto = require('crypto');
import { S3, PutObjectCommand } from '@aws-sdk/client-s3';
import { URL } from 'url';

class FileLocationConverter {
config = {};

constructor(config) {
this.config = config;
}

getKey(file) {
const filename = `${file.hash}${file.ext}`;
if (!this.config.directory) return filename;
if (!this.config.directory) {
return filename;
}
return `${this.config.directory}/${filename}`;
}

getUrl(data) {
if (!this.config.cdn) return data.Location;
var parts = {};
URI.parseHost(this.config.cdn, parts);
parts.protocol = "https"; // Force https
parts.path = data.Key;
return URI.build(parts);
if (!this.config.cdn) {
return data.Location;
}
const url = new URL(this.config.cdn);
url.protocol = 'https';
url.pathname = data.Key;
return url.toString();
}
}

module.exports = {
provider: "do",
name: "Digital Ocean Spaces",
const ACL = 'public-read';
const CacheControl = 'public, max-age=31536000, immutable';
let s3;
let converter;
let pluginConfig = {};

async function uploadFile (file) {
const bucketParams = {
    Bucket: pluginConfig.space,
Key: converter.getKey(file),
Body: Buffer.from(file.buffer, 'binary'),
ACL,
CacheControl,
ContentType: file.mime
};

await s3.send(new PutObjectCommand(bucketParams));
}

async function uploadStream (file) {
const bucketParams = {
    Bucket: pluginConfig.space,
Key: converter.getKey(file),
Body: file.stream,
ACL,
CacheControl,
ContentType: file.mime
};

await s3.send(new PutObjectCommand(bucketParams));
}

async function deleteFile (file) {
return s3.deleteObject({
Bucket: pluginConfig.Spaces,
Key: converter.getKey(file)
});
}

export default {
provider: 'do',
name: 'Digital Ocean Spaces',
auth: {
key: {
label: "Key",
type: "text"
label: 'Key',
type: 'text'
},
secret: {
label: "Secret",
type: "text"
label: 'Secret',
type: 'text'
},
endpoint: {
label: "Endpoint (e.g. 'fra1.digitaloceanspaces.com')",
type: "text",
label: 'Endpoint (e.g. \'fra1.digitaloceanspaces.com\')',
type: 'text',
},
cdn: {
label: "CDN Endpoint (Optional - e.g. 'https://cdn.space.com')",
type: "text",
label: 'CDN Endpoint (Optional - e.g. \'https://cdn.space.com\')',
type: 'text',
},
space: {
label: "Space (e.g. myspace)",
type: "text",
label: 'Space (e.g. myspace)',
type: 'text',
},
directory: {
label: 'Directory (Optional - e.g. directory - place when you want to save files)',
type: 'text'
}
},
init: config => {
const endpoint = new AWS.Endpoint(config.endpoint);
const converter = new FileLocationConverter(config);

const S3 = new AWS.S3({
endpoint: endpoint,
accessKeyId: config.key,
secretAccessKey: config.secret,
params: {
ACL: 'public-read',
Bucket: config.space,
CacheControl: 'public, max-age=31536000, immutable'
},
});
init (config) {
pluginConfig = config;
converter = new FileLocationConverter(config);

const upload = file => new Promise((resolve, reject) => {
//--- Compute the file key.
file.hash = crypto.createHash('md5').update(file.hash).digest("hex");

//--- Upload the file into the space (technically the S3 Bucket)
S3.upload({
Key: converter.getKey(file),
Body: Buffer.from(file.buffer, "binary"),
ContentType: file.mime
},

//--- Callback handler
(err, data) => {
if (err) return reject(err);
file.url = converter.getUrl(data);
delete file.buffer;
resolve();
});
s3 = new S3({
endpoint: config.endpoint,
region: undefined,
credentials: {
accessKeyId: config.key,
secretAccessKey: config.secret
}
});

return {
upload,

uploadStream: file => new Promise((resolve, reject) => {
const _buf = [];

file.stream.on('data', chunk => _buf.push(chunk));
file.stream.on('end', () => {
file.buffer = Buffer.concat(_buf);
resolve(upload(file));
});
file.stream.on('error', err => reject(err));
}),

delete: file => new Promise((resolve, reject) => {

//--- Delete the file from the space
S3.deleteObject({
Bucket: config.bucket,
Key: converter.getKey(file),
},

//--- Callback handler
(err, data) => {
if (err) return reject(err);
else resolve();
})
}
)
}
upload: uploadFile,
uploadStream: uploadStream,
delete: deleteFile
};
}
}
};
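
Below is a minimal usage sketch, not part of the diff, showing how the reworked provider could be exercised outside Strapi. It assumes the file loads as an ES module as written, that a region is resolvable by the SDK (e.g. via AWS_REGION, since init passes region: undefined), and that the credentials, endpoint, space, CDN, and file values are placeholders.

// Minimal usage sketch (not part of the PR); all values below are placeholders.
import provider from './lib/index.js';

// init() wires up the v3 S3 client and returns the provider interface.
const { upload, delete: remove } = provider.init({
  key: process.env.SPACES_KEY,
  secret: process.env.SPACES_SECRET,
  endpoint: 'https://fra1.digitaloceanspaces.com', // placeholder endpoint
  space: 'myspace',
  cdn: 'https://cdn.example.com',
  directory: 'uploads'
});

// A file object shaped like the one Strapi's upload plugin hands to a provider.
const file = {
  hash: 'abc123def456',
  ext: '.png',
  mime: 'image/png',
  buffer: Buffer.from([0x89, 0x50, 0x4e, 0x47])
};

await upload(file);   // sends a PutObjectCommand for uploads/abc123def456.png
await remove(file);   // deleteObject for the same key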