
Commit

Finishing basic functionality
pazdera committed Sep 8, 2017
1 parent 395acb2 commit 0d30d74
Showing 3 changed files with 229 additions and 13 deletions.
230 changes: 221 additions & 9 deletions lib/index.js
@@ -1,4 +1,7 @@
-const aws = require('aws-sdk');
const aws = require('aws-sdk'),
    TarGz = require('tar.gz'),
    semver = require('semver'),
    stream = require('stream');

let s3 = null,
    config = {
@@ -93,8 +96,62 @@ function archiveBuild(buildDir, projectName, channel, version, number, arch) {

function _doArchiveBuild(buildDir, projectName, channel, version, number, arch) {
    return new Promise((resolve, reject) => {
-        // tar gz directory
-        // upload to s3
        let tgz = new TarGz({}, {fromBase: true}),
            stream;

        stream = tgz.createReadStream(buildDir);

        s3.upload(
            {
                Bucket: config.rootBucket.name,
                Key: `${projectName}/${channel}/${projectName}_${version}-${number}_${arch}.tar.gz`,
                Body: stream
            },
            (err, data) => {
                if (err) {
                    return reject('Failed to upload archive: ' + err);
                }

                resolve({
                    project: projectName,
                    channel: channel,
                    path: data.Key,
                    version: version,
                    build: number,
                    arch: arch,
                    date: new Date(),
                    url: data.Location
                });
            }
        );
    });
}
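
For illustration, a rough usage sketch of the archive step (module loaded as in test/playground.js; the build directory, build number, architecture and bucket name are made-up example values, and the resolved object carries the fields assembled above):

const kart = require('../lib');

kart.configure({
    awsKey: process.env.AWS_KEY,
    awsSecret: process.env.AWS_SECRET,
    rootBucket: {name: 'releases'} // hypothetical bucket name
}).then(() => {
    return kart.archiveBuild('./build', 'kano-code', 'staging', '1.0.7', 42, 'x64');
}).then((build) => {
    // build: {project, channel, path, version, build, arch, date, url}
    console.log('Archived as ' + build.path);
});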

/*
 * Remove a build from the archive by path or build object.
 *
 * @param {String, Object} build Either a direct path to the build or a build object.
 *
 */
function removeBuild(build) {
    if (typeof build === 'string') {
        build = {path: build};
    }

    return new Promise((resolve, reject) => {
        s3.deleteObject(
            {
                Bucket: config.rootBucket.name,
                Key: build.path
            },
            (err, data) => {
                if (err) {
                    return reject('Failed to remove ' + build.path + ': ' + err);
                }

                resolve();
            }
        );
    });
}
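
removeBuild() accepts either the S3 key produced by archiveBuild() or a build object carrying a path field; a hypothetical call for each form (the key shown follows the naming scheme above):

removeBuild('kano-code/staging/kano-code_1.0.7-42_x64.tar.gz')
    .then(() => console.log('removed'));

// or, with a build object coming from archiveBuild()/list():
list('kano-code', 'staging', {limit: 1})
    .then((builds) => removeBuild(builds[0]));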

@@ -130,11 +187,23 @@ function _getComparator(opts) {

    while (res === 0 && keys.length) {
        let key = keys.shift();

-        if (a[key] > b[key]) {
-            res = order > 0 ? 1 : -1;
-        } else if (a[key] < b[key]) {
-            res = order > 0 ? -1 : 1;

        /* Exception: use semver to compare versions */
        if (key === 'version') {
            let ca = semver.clean(a[key]),
                cb = semver.clean(b[key]);

            if (semver.gt(ca, cb)) {
                res = order > 0 ? 1 : -1;
            } else if (semver.lt(ca, cb)) {
                res = order > 0 ? -1 : 1;
            }
        } else {
            if (a[key] > b[key]) {
                res = order > 0 ? 1 : -1;
            } else if (a[key] < b[key]) {
                res = order > 0 ? -1 : 1;
            }
        }
    }
}
@@ -214,8 +283,151 @@ function list(project, channel, opts) {
});
}
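
With the semver-aware comparator in place, list() can now order builds by true version precedence (e.g. 1.0.10 ranks above 1.0.9, which a plain string compare gets wrong); a hypothetical call, mirroring the options used in test/playground.js:

list('kano-code', 'staging', {sort: {key: ['version', 'build'], order: -1}, limit: 5})
    .then((builds) => {
        // Highest version first; ties broken by build number.
        builds.forEach((b) => console.log(b.version, b.build, b.path));
    });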

function _deleteEntries(bucketName, entries) {
    let p = entries.map((entry) => {
        return new Promise((resolve, reject) => {
            if (!entry.Key.match(/\/$/)) {
                s3.deleteObject(
                    {
                        Bucket: bucketName,
                        Key: entry.Key
                    },
                    (err, data) => {
                        if (err) {
                            return reject('Failed to remove ' + entry.Key + ': ' + err);
                        }

                        resolve(entry.Key);
                    }
                );
            } else {
                // Directory placeholders have nothing to delete; resolve right away
                // so that Promise.all() below can settle.
                resolve(entry.Key);
            }
        });
    });

    return Promise.all(p);
}

function _clearBucket(bucketName) {
    return new Promise((resolve, reject) => {
        s3.listObjects(
            {
                Bucket: bucketName
            },
            (err, data) => {
                if (err) {
                    return reject('Failed to list files in ' + bucketName + ': ' + err);
                }

                _deleteEntries(bucketName, data.Contents)
                    .then(resolve, reject);
            }
        );
    });
}
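
Note that listObjects() returns at most 1,000 keys per call, so this clears only the first page of a very large bucket. A paginated variant, not part of this commit, could walk the listing and use the batch deleteObjects() call instead (a sketch, assuming the same s3 client and error style):

function _clearBucketPaged(bucketName, marker) {
    return new Promise((resolve, reject) => {
        s3.listObjects({Bucket: bucketName, Marker: marker}, (err, data) => {
            if (err) {
                return reject('Failed to list files in ' + bucketName + ': ' + err);
            }

            if (data.Contents.length === 0) {
                return resolve();
            }

            s3.deleteObjects(
                {
                    Bucket: bucketName,
                    Delete: {Objects: data.Contents.map((entry) => ({Key: entry.Key}))}
                },
                (err) => {
                    if (err) {
                        return reject('Failed to remove objects: ' + err);
                    }

                    if (data.IsTruncated) {
                        // Continue with the next page, starting after the last key seen.
                        resolve(_clearBucketPaged(bucketName, data.Contents[data.Contents.length - 1].Key));
                    } else {
                        resolve();
                    }
                }
            );
        });
    });
}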

function _uploadKartFile(build, target) {
    let info = {
        project: build.project,
        channel: build.channel,
        version: build.version,
        build: build.build,
        releaseDate: new Date(),
        buildDate: build.date
    };

    return new Promise((resolve, reject) => {
        s3.upload(
            {
                Bucket: target,
                Key: 'kart.json',
                Body: JSON.stringify(info)
            },
            (err, data) => {
                if (err) {
                    return reject('Failed uploading the kart file: ' + err);
                }

                resolve(info);
            }
        );
    });
}
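
The kart.json file describes what is currently deployed in a channel bucket. Reading it back might look like this (a sketch, not part of this commit; bucket stands for the channel's bucket name):

s3.getObject({Bucket: bucket, Key: 'kart.json'}, (err, data) => {
    if (err) {
        return console.error('Failed to fetch kart.json: ' + err);
    }

    let released = JSON.parse(data.Body.toString());
    // {project, channel, version, build, releaseDate, buildDate}
    console.log(released.project + ' ' + released.version + '-' + released.build);
});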

/*
 * Download, unzip and release a build to a target bucket.
 */
function release(build) {
    let bucket = config.projects[build.project].channels[build.channel].bucket;

    return _clearBucket(bucket)
        .then(() => {
            return _doRelease(build, bucket);
        }).then(() => {
            return _uploadKartFile(build, bucket);
        });
}
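
release() looks the target bucket up via config.projects[project].channels[channel].bucket, so the configuration passed to configure() needs a projects map in that shape; a hypothetical example (bucket names invented):

configure({
    awsKey: process.env.AWS_KEY,
    awsSecret: process.env.AWS_SECRET,
    rootBucket: {name: 'kart-archive'},
    projects: {
        'kano-code': {
            channels: {
                staging: {bucket: 'kano-code-staging'}
            }
        }
    }
});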

function _doRelease(build, target) {
    return new Promise((resolve, reject) => {
        s3.getObject(
            {
                Bucket: config.rootBucket.name,
                Key: build.path
            },
            (err, data) => {
                if (err) {
                    return reject('Failed to download archive: ' + err);
                }

                let rs = new stream.Readable(),
                    targz = new TarGz(),
                    parse = targz.createParseStream();

                rs._read = function () {};
                rs.push(data.Body);
                rs.push(null);

                parse.on('entry', (entry) => {
                    if (entry.type === 'File') {
                        let p = new stream.PassThrough();
                        entry.pipe(p);

                        s3.upload(
                            {
                                Bucket: target,
                                Key: entry.path,
                                Body: p,
                                ACL: 'public-read'
                            },
                            (err, data) => {
                                if (err) {
                                    return reject('Failed deploying ' + entry.path + ': ' + err);
                                }
                            }
                        );
                    }
                });

                rs.pipe(parse);

                parse.on('finish', () => {
                    resolve();
                });
            }
        );
-        // Download build
-        // unzip into a temporary directory
-        // wipe the target bucket
-        // upload to s3
-        // Write build information
-        // Make files public
    });
}
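
One caveat worth noting: the promise above resolves when the tar stream has been parsed, not when every s3.upload() callback has fired, so a caller could see release() finish while files are still landing in the bucket. A variant that waits for the uploads (a sketch, not part of this commit) would collect them and settle on Promise.all():

let uploads = [];

parse.on('entry', (entry) => {
    if (entry.type === 'File') {
        let p = new stream.PassThrough();
        entry.pipe(p);

        uploads.push(new Promise((done, fail) => {
            s3.upload(
                {Bucket: target, Key: entry.path, Body: p, ACL: 'public-read'},
                (err) => err ? fail('Failed deploying ' + entry.path + ': ' + err) : done()
            );
        }));
    }
});

parse.on('finish', () => {
    // Resolve only once every file has actually been uploaded.
    Promise.all(uploads).then(resolve, reject);
});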

module.exports = {
    configure,
    archiveBuild,
-    list
    removeBuild,
    list,
    release
};
4 changes: 3 additions & 1 deletion package.json
@@ -17,6 +17,8 @@
  },
  "homepage": "https://github.com/KanoComputing/kart#readme",
  "dependencies": {
-    "aws-sdk": "^2.111.0"
    "aws-sdk": "^2.111.0",
    "semver": "^5.4.1",
    "tar.gz": "^1.0.5"
  }
}
8 changes: 5 additions & 3 deletions test/playground.js
@@ -1,6 +1,6 @@
-const rlib = require('../lib');
const kart = require('../lib');

-rlib.configure({
kart.configure({
    awsKey: process.env.AWS_KEY,
    awsSecret: process.env.AWS_SECRET,
    rootBucket: {
@@ -9,9 +9,11 @@ rlib.configure({
    }
}).then(() => {

-    rlib.list('kano-code', 'staging', {filter: {version: '1.0.7'}, sort: {key: ['version', 'build'], order: 1}, limit: 1})
    kart.list('kano-code', 'staging', {sort: {key: ['version', 'build'], order: -1}})
        .then((data) => {
            console.log(data);

            kart.release(data[0]);
        }).catch((err) => {
            console.log(err);
        });
