Skip to content

Commit

Permalink
Adding upload algorithms to the s3 deploy method
Browse files Browse the repository at this point in the history
  • Loading branch information
pazdera committed Nov 29, 2017
1 parent 125e22e commit a03faaf
Show file tree
Hide file tree
Showing 2 changed files with 120 additions and 55 deletions.
174 changes: 119 additions & 55 deletions lib/deploy-methods/s3.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,63 +3,113 @@ const aws = require('aws-sdk'),
stream = require('stream'),
config = require('../config'),
Release = require('../data').Release,
mime = require('mime-types');
mime = require('mime-types'),
tmp = require('tmp'),
childProcess = require('child_process');


/**
 * Download the build archive from the root bucket and deploy it to the
 * target bucket using the requested upload algorithm.
 *
 * @param {Object} build     Build to be released (its `path` locates the
 *                           tar.gz archive inside the root bucket).
 * @param {String} target    Name of the destination S3 bucket.
 * @param {String} algorithm One of 'clear', 'overwrite' or 'sync'.
 * @returns {Promise<Release>} Resolves with the new Release object.
 */
function _doRelease(build, target, algorithm) {
    let s3 = new aws.S3();
    return s3.getObject({
        Bucket: config.local.rootBucket.name,
        Key: build.path
    }).promise().then((data) => {
        // 'clear' and 'overwrite' only differ in whether the bucket was
        // emptied beforehand (see release()); the upload itself is identical.
        if (algorithm === 'clear' || algorithm === 'overwrite') {
            return _doReleaseByUpload(target, data);
        } else if (algorithm === 'sync') {
            return _doReleaseBySync(build, target, data);
        }
        // Fail loudly rather than silently skipping the deploy.
        throw new Error('Unknown upload algorithm: ' + algorithm);
    }, (err) => {
        throw new Error('Failed to download build archive: ' + err);
    }).then(() => {
        let release = new Release(build);
        release.updateReleaseDate();

        return release;
    });
}

/**
 * Deploy a build by streaming each file out of the tar.gz archive and
 * uploading it to the target bucket individually ('clear'/'overwrite'
 * algorithms).
 *
 * @param {String} target Name of the S3 bucket to upload into.
 * @param {Object} data   s3.getObject() result; `data.Body` holds the archive.
 * @returns {Promise} Resolves once every file has been uploaded.
 */
function _doReleaseByUpload(target, data) {
    return new Promise((resolve, reject) => {
        let s3 = new aws.S3(),
            rs = new stream.Readable(),
            targz = new TarGz(),
            parse = targz.createParseStream(),
            uploads = [];

        rs._read = function () {}
        rs.push(data.Body);
        rs.push(null);

        parse.on('entry', (entry) => {
            if (entry.type === 'File') {
                let p = new stream.PassThrough();
                entry.pipe(p);

                // Keep the upload promise so we can wait for completion below.
                uploads.push(
                    s3.upload({
                        Bucket: target,
                        Key: entry.path,
                        Body: p,
                        ACL: 'public-read',
                        CacheControl: 'max-age=600',
                        ContentType: mime.lookup(entry.path) || 'application/octet-stream'
                    }).promise().catch((err) => {
                        throw new Error('Failed deploying ' + entry.path + ': ' + err);
                    })
                );
            }
        });

        rs.pipe(parse);

        // 'finish' only means the archive has been parsed; the uploads are
        // still in flight, so resolve only after all of them settle.
        parse.on('finish', () => {
            Promise.all(uploads).then(resolve, reject);
        });
    });
}

/**
 * Deploy a build with `aws s3 sync` to minimise bandwidth usage ('sync'
 * algorithm): unpack the archive into a temporary directory, then shell
 * out to the AWS CLI to sync it with the target bucket. The temporary
 * directory is cleaned up on every exit path.
 *
 * @param {Object} build  Build being released (kept for interface parity
 *                        with _doReleaseByUpload).
 * @param {String} target Name of the S3 bucket to sync into.
 * @param {Object} data   s3.getObject() result; `data.Body` holds the archive.
 * @returns {Promise} Resolves when the sync command has completed.
 */
function _doReleaseBySync(build, target, data) {
    return new Promise((resolve, reject) => {
        let rs = new stream.Readable(),
            targz = new TarGz(),
            write;

        rs._read = function () {}
        rs.push(data.Body);
        rs.push(null);

        tmp.dir({unsafeCleanup: true}, (err, path, cleanupCb) => {
            if (err) {
                return reject('Failed to create a temporary directory: ' + err);
            }

            try {
                write = targz.createWriteStream(path);
                rs.pipe(write);

                write.on('finish', () => {
                    // NOTE(review): `path` and `target` are interpolated into a
                    // shell command; both come from tmp/config rather than user
                    // input, but keep that invariant in mind.
                    childProcess.exec(`aws s3 sync ${path} s3://${target}`, (err, stdout, stderr) => {
                        if (err) {
                            cleanupCb();
                            return reject('Sync failed: ' + err);
                        }

                        cleanupCb();
                        resolve();
                    });
                });
            } catch (error) {
                // A `throw` here would be swallowed by the async callback;
                // reject the promise explicitly instead.
                cleanupCb();
                reject(error);
            }
        });
    });
}

Expand Down Expand Up @@ -98,7 +148,7 @@ function _downloadKartFile(project, channel) {
}
return reject('Failed downloading kart.json: ' + err);
}

resolve(new Release(JSON.parse(data.Body.toString())));
}
);
Expand All @@ -120,7 +170,7 @@ function _deleteEntries(bucketName, entries) {
if (err) {
return reject('Failed to remove ' + entry.Key + ': ' + err);
}

resolve(entry.Key);
}
);
Expand Down Expand Up @@ -150,15 +200,29 @@ function _clearBucket(bucketName) {
});
}

/**
* Release a build.
*
* Uploading algorithms:
* * clear: Clears the target bucket before writing the build into it.
* overwrite: Writes all the files into the bucket without clearing it out first.
* sync: Uses the `aws s3 sync` command to minimise bandwidth usage.
*
* @param {Object} build The build object to be released.
* @param {Object} opts Options.
* @param {String} opts.bucket Name of the bucket to deploy to.
* @param {String} opts.algorithm Either 'clear', 'overwrite' or 'sync'.
*/
function release(build, opts) {
let bucket = opts.bucket,
algorithm = opts.algorithm || 'clear',
releaseObject;

return build.fetchMetadata()
.then(() => {
return _clearBucket(bucket)
return algorithm === 'clear' ? _clearBucket(bucket) : Promise.resolve();
}).then(() => {
return _doRelease(build, bucket);
return _doRelease(build, bucket, algorithm);
}).then((r) => {
releaseObject = r;
_uploadKartFile(releaseObject, bucket);
Expand Down
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
"mime-types": "^2.1.17",
"semver": "^5.4.1",
"tar.gz": "^1.0.5",
"tmp": "0.0.33",
"update-notifier": "^2.3.0",
"yargs": "^8.0.2"
}
Expand Down

0 comments on commit a03faaf

Please sign in to comment.