From a03faaf75c4da6a1ff60e5a1132be5ae2026c112 Mon Sep 17 00:00:00 2001
From: Radek Pazdera
Date: Wed, 29 Nov 2017 12:33:32 +0000
Subject: [PATCH] Adding upload algorithms to the s3 deploy method

---
 lib/deploy-methods/s3.js | 174 ++++++++++++++++++++++++++-------------
 package.json             |   1 +
 2 files changed, 120 insertions(+), 55 deletions(-)

diff --git a/lib/deploy-methods/s3.js b/lib/deploy-methods/s3.js
index 6c5de81..92fe292 100644
--- a/lib/deploy-methods/s3.js
+++ b/lib/deploy-methods/s3.js
@@ -3,63 +3,113 @@ const aws = require('aws-sdk'),
     stream = require('stream'),
     config = require('../config'),
     Release = require('../data').Release,
-    mime = require('mime-types');
+    mime = require('mime-types'),
+    tmp = require('tmp'),
+    childProcess = require('child_process');
 
-function _doRelease(build, target) {
-    return new Promise((resolve, reject) => {
-        let s3 = new aws.S3();
-        s3.getObject(
-            {
-                Bucket: config.local.rootBucket.name,
-                Key: build.path
-            },
-            (err, data) => {
-                if (err) {
-                    return reject('Failed to download build archive: ' + err);
-                }
-
-                let rs = new stream.Readable(),
-                    targz = new TarGz(),
-                    parse = targz.createParseStream();
-
-                rs._read = function () {}
-                rs.push(data.Body);
-                rs.push(null);
-
-                parse.on('entry', (entry) => {
-                    if (entry.type === 'File') {
-                        let p = new stream.PassThrough();
-                        entry.pipe(p);
-
-                        s3.upload(
-                            {
-                                Bucket: target,
-                                Key: entry.path,
-                                Body: p,
-                                ACL: 'public-read',
-                                CacheControl: 'max-age=600',
-                                ContentType: mime.lookup(entry.path) || 'application/octet-stream'
-                            },
-                            (err, data) => {
-                                if (err) {
-                                    return reject('Failed deploying ' + entry.path + ':' + err);
-                                }
-                            }
-                        );
-                    }
-                });
-
-                rs.pipe(parse);
-
-                parse.on('finish', () => {
-                    let release = new Release(build);
-                    release.updateReleaseDate();
-
-                    resolve(release);
-                });
-            }
-        );
-    });
-}
+function _doRelease(build, target, algorithm) {
+    let s3 = new aws.S3();
+
+    return s3.getObject({
+        Bucket: config.local.rootBucket.name,
+        Key: build.path
+    }).promise().then((data) => {
+        if (algorithm === 'clear' || algorithm === 'overwrite') {
+            return _doReleaseByUpload(target, data);
+        } else if (algorithm === 'sync') {
+            return _doReleaseBySync(build, target, data);
+        }
+
+        throw new Error('Unknown release algorithm: ' + algorithm);
+    }, (err) => {
+        throw new Error('Failed to download build archive: ' + err);
+    }).then(() => {
+        let release = new Release(build);
+        release.updateReleaseDate();
+
+        return release;
+    });
+}
+
+function _doReleaseByUpload(target, data) {
+    return new Promise((resolve, reject) => {
+        let s3 = new aws.S3(),
+            rs = new stream.Readable(),
+            targz = new TarGz(),
+            parse = targz.createParseStream();
+
+        rs._read = function () {};
+        rs.push(data.Body);
+        rs.push(null);
+
+        parse.on('entry', (entry) => {
+            if (entry.type === 'File') {
+                let p = new stream.PassThrough();
+                entry.pipe(p);
+
+                s3.upload(
+                    {
+                        Bucket: target,
+                        Key: entry.path,
+                        Body: p,
+                        ACL: 'public-read',
+                        CacheControl: 'max-age=600',
+                        ContentType: mime.lookup(entry.path) || 'application/octet-stream'
+                    },
+                    (err) => {
+                        if (err) {
+                            return reject('Failed deploying ' + entry.path + ': ' + err);
+                        }
+                    }
+                );
+            }
+        });
+
+        rs.pipe(parse);
+
+        // Uploads are kicked off as entries are parsed; the promise
+        // resolves once the whole archive has been read.
+        parse.on('finish', () => {
+            resolve();
+        });
+    });
+}
+
+function _doReleaseBySync(build, target, data) {
+    return new Promise((resolve, reject) => {
+        let rs = new stream.Readable(),
+            targz = new TarGz(),
+            write;
+
+        rs._read = function () {};
+        rs.push(data.Body);
+        rs.push(null);
+
+        tmp.dir({unsafeCleanup: true}, (err, path, cleanupCb) => {
+            if (err) {
+                return reject('Failed to create a temporary directory: ' + err);
+            }
+
+            try {
+                // Unpack the archive into the temporary directory, then
+                // let the AWS CLI work out which files actually changed.
+                write = targz.createWriteStream(path);
+                rs.pipe(write);
+
+                write.on('finish', () => {
+                    childProcess.exec(`aws s3 sync ${path} s3://${target}`, (err) => {
+                        cleanupCb();
+
+                        if (err) {
+                            return reject('Sync failed: ' + err);
+                        }
+
+                        resolve();
+                    });
+                });
+            } catch (error) {
+                cleanupCb();
+                reject(error);
+            }
+        });
+    });
+}
 
@@ -98,7 +148,7 @@ function _downloadKartFile(project, channel) {
                 }
                 return reject('Failed downloading kart.json: ' + err);
             }
-            
+
             resolve(new Release(JSON.parse(data.Body.toString())));
         }
     );
@@ -120,7 +170,7 @@ function _deleteEntries(bucketName, entries) {
                     if (err) {
                         return reject('Failed to remove ' + entry.Key + ': ' + err);
                     }
-                    
+
                     resolve(entry.Key);
                 }
             );
@@ -150,15 +200,29 @@
     });
 }
 
+/**
+ * Release a build.
+ *
+ * Upload algorithms:
+ * * clear: Clears the target bucket before writing the build into it.
+ * * overwrite: Writes all the files into the bucket without clearing it out first.
+ * * sync: Uses `aws s3 sync` to upload only the files that changed, minimising bandwidth usage.
+ *
+ * @param {Object} build The build object to be released.
+ * @param {Object} opts Options.
+ * @param {String} opts.bucket Name of the bucket to deploy to.
+ * @param {String} opts.algorithm Either 'clear', 'overwrite' or 'sync'. Defaults to 'clear'.
+ */
 function release(build, opts) {
     let bucket = opts.bucket,
+        algorithm = opts.algorithm || 'clear',
         releaseObject;
 
     return build.fetchMetadata()
         .then(() => {
-            return _clearBucket(bucket)
+            return algorithm === 'clear' ? _clearBucket(bucket) : Promise.resolve();
         }).then(() => {
-            return _doRelease(build, bucket);
+            return _doRelease(build, bucket, algorithm);
         }).then((r) => {
             releaseObject = r;
             _uploadKartFile(releaseObject, bucket);
diff --git a/package.json b/package.json
index e62b61f..cd404b9 100644
--- a/package.json
+++ b/package.json
@@ -27,6 +27,7 @@
     "mime-types": "^2.1.17",
     "semver": "^5.4.1",
     "tar.gz": "^1.0.5",
+    "tmp": "0.0.33",
     "update-notifier": "^2.3.0",
     "yargs": "^8.0.2"
   }
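
Usage sketch: a minimal example of how the new algorithm option can be
exercised from calling code, assuming the module keeps exporting release()
as before and that `build` is obtained from the build listing in the usual
way; the bucket name below is a placeholder.

    const s3 = require('./lib/deploy-methods/s3');

    // 'sync' shells out to the AWS CLI, so the aws binary has to be on
    // PATH and configured with credentials for the target account.
    s3.release(build, { bucket: 'example-releases-bucket', algorithm: 'sync' })
        .then((release) => console.log('Released:', release))
        .catch((err) => console.error('Release failed:', err));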