Merge pull request #10 from ibarsi/develop
Develop
ibarsi authored Feb 21, 2017
2 parents 98f0b14 + 6583839 commit 4d3ba6e
Showing 3 changed files with 161 additions and 125 deletions.
10 changes: 6 additions & 4 deletions main/index.js
@@ -25,7 +25,9 @@ clear();

console.log(
chalk.yellow(
figlet.textSync('Git Velocity', { horizontalLayout: 'full' })
figlet.textSync('Git Velocity', {
horizontalLayout: 'full'
})
)
);

@@ -36,15 +38,15 @@ async(function* () {
const { type } = yield getRepositoryType();
const commits = Commits(type);

const isTokenInitialized = yield commits.isCredsTokenInitialized();
const isAuthorized = yield commits.isAuthorized();

if (!isTokenInitialized) {
if (!isAuthorized) {
console.log();
console.log(chalk.white('Creating auth token in root.'));

const { username, password } = yield getRepositoryCreds(type);

commits.storeCreds(username, password);
commits.authorize(username, password);
}

console.log();
48 changes: 48 additions & 0 deletions main/modules/auth.js
@@ -0,0 +1,48 @@
/* ==================================================
AUTH
================================================== */

import fs from 'fs';

import { isFile, partial } from './helpers';

// PUBLIC

export function Auth(token) {
return {
isCredsTokenInitialized: partial(_isCredsTokenInitialized, token),
getCreds: partial(_getCreds, token),
storeCreds: partial(_storeCreds, token)
};
}

export default {
Auth
};

// PRIVATE

function _isCredsTokenInitialized(token) {
return new Promise(resolve => resolve(isFile(`${ process.env.HOME }/${ token }`)));
}

function _getCreds(token) {
return new Promise((resolve, reject) => {
try {
resolve(JSON.parse(fs.readFileSync(`${ process.env.HOME }/${ token }`, 'utf8')));
}
catch (error) {
reject(error);
}
});
}

function _storeCreds(token, username, password) {
return new Promise((resolve, reject) => {
fs.writeFile(
`${ process.env.HOME }/${ token }`,
JSON.stringify({ username, password }),
error => error ? reject(error) : resolve()
);
});
}
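
For reference, a rough usage sketch of the new Auth module introduced above. The token file name and credentials here are placeholders, not values from the commit; the module resolves the token path against process.env.HOME exactly as in the code above.

import { Auth } from './auth';

// Hypothetical token file name; Auth reads/writes `${ process.env.HOME }/.example_token`.
const auth = Auth('.example_token');

auth.isCredsTokenInitialized()
    .then(initialized => {
        // Placeholder credentials, for illustration only.
        return initialized ? undefined : auth.storeCreds('some-user', 'some-password');
    })
    .then(() => auth.getCreds())
    .then(({ username }) => console.log(`Credentials stored for ${ username }`))
    .catch(error => console.error(error));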
228 changes: 107 additions & 121 deletions main/modules/commits.js
@@ -2,9 +2,8 @@
COMMITS
================================================== */

import fs from 'fs';

import { isFile, uniq, async, requestPromise } from './helpers';
import { uniq, async, requestPromise } from './helpers';
import { Auth } from './auth';

// PUBLIC

@@ -14,26 +13,50 @@ export const TYPES = {
};

export function Commits(type = TYPES.GITHUB) {
const config = _initCommitProps(type);
switch (type) {
case TYPES.GITHUB:
return GitHubCommits(Auth('.github_token'));
case TYPES.BITBUCKET:
return BitBucketCommits(Auth('.bitbucket_token'));
default:
break;
}
}

export function getRepositoryTypeFromUrl(repository_url) {
if (!repository_url) { return TYPES.GITHUB; }

if (repository_url.indexOf('github') >= 0) {
return TYPES.GITHUB;
}
else if (repository_url.indexOf('bitbucket') >= 0) {
return TYPES.BITBUCKET;
}

return TYPES.GITHUB;
}

export default {
TYPES,
Commits,
getRepositoryTypeFromUrl
};

// BITBUCKET

function BitBucketCommits(auth) {
const config = {
commits_url: 'https://api.bitbucket.org/2.0/repositories/{owner}/{repo}/commits'
};

return {
isCredsTokenInitialized() {
return new Promise(resolve => resolve(isFile(`${ process.env.HOME }/${ config.token }`)));
},
storeCreds(username, password) {
return new Promise((resolve, reject) => {
fs.writeFile(
`${ process.env.HOME }/${ config.token }`,
JSON.stringify({ username, password }),
error => error ? reject(error) : resolve()
);
});
},
isAuthorized: auth.isCredsTokenInitialized,
authorize: auth.storeCreds,
getCommitsByRepo(repository, owner) {
return new Promise((resolve, reject) => {
async(function* () {
try {
const { username, password } = yield _getCreds(config.token);
const { username, password } = yield auth.getCreds();

const options = {
url: config.commits_url.replace('{owner}', owner).replace('{repo}', repository),
@@ -45,29 +68,9 @@ export function Commits(type = TYPES.GITHUB) {
}
};

switch (type) {
case TYPES.GITHUB:
const branches = yield requestPromise(config.branches_url.replace('{owner}', owner).replace('{repo}', repository), options.config);
const branch_commit_results = yield Promise.all(branches.data.map(branch => {
return _requestFullGitHubResponse(Object.assign({}, options, {
url: `${ options.url }?sha=${ branch.name }`
}));
}));

const github_commits = branch_commit_results.reduce((acc, list) => acc.concat(list), []);
const unique_commits = uniq(github_commits, item => item.sha);

resolve(unique_commits.map(GitHubCommit));
break;
case TYPES.BITBUCKET:
const bitbucket_commits = yield _requestFullBitBucketResponse(options);

resolve(bitbucket_commits.map(BitBucketCommit));
break;
default:
resolve([]);
break;
}
const commits = yield _requestPagedResponse(options, response => response.data.next);

resolve(commits.reduce((acc, value) => acc.concat(value.values), []).map(BitBucketCommit));
}
catch (error) {
reject(error);
@@ -78,27 +81,6 @@
};
}

export function getRepositoryTypeFromUrl(repository_url) {
if (!repository_url) { return TYPES.GITHUB; }

if (repository_url.indexOf('github') >= 0) {
return TYPES.GITHUB;
}
else if (repository_url.indexOf('bitbucket') >= 0) {
return TYPES.BITBUCKET;
}

return TYPES.GITHUB;
}

export default {
TYPES,
Commits,
getRepositoryTypeFromUrl
};

// PRIVATE

function BitBucketCommit(value) {
return {
id: value.sha,
@@ -108,6 +90,65 @@ function BitBucketCommit(value) {
};
}

// GITHUB

function GitHubCommits(auth) {
const config = {
commits_url: 'https://api.github.com/repos/{owner}/{repo}/commits',
branches_url: 'https://api.github.com/repos/{owner}/{repo}/branches'
};

const nextPageFunc = response => {
const link = response.headers.link;

if (link && link.indexOf('rel="next"') >= 0) {
const next_url = link.substring(0, link.indexOf('rel="next"'));
const next_url_formatted = next_url.trim().replace('<', '').replace('>', '').replace(';', '');

return next_url_formatted;
}
};

return {
isAuthorized: auth.isCredsTokenInitialized,
authorize: auth.storeCreds,
getCommitsByRepo(repository, owner) {
return new Promise((resolve, reject) => {
async(function* () {
try {
const { username, password } = yield auth.getCreds();

const options = {
url: config.commits_url.replace('{owner}', owner).replace('{repo}', repository),
config: {
headers: {
'User-Agent': owner,
Authorization: 'Basic ' + new Buffer(`${ username }:${ password }`).toString('base64')
}
}
};

const branches = yield requestPromise(config.branches_url.replace('{owner}', owner).replace('{repo}', repository), options.config);
const branch_commit_results = yield Promise.all(branches.data.map(branch => {
return _requestPagedResponse(Object.assign({}, options, {
url: `${ options.url }?sha=${ branch.name }`
}), nextPageFunc);
}));

const github_commits = branch_commit_results.reduce((acc, list) => acc.concat(list), []);
const unique_commits = uniq(github_commits, item => item.sha);

resolve(unique_commits.map(GitHubCommit));
}
catch (error) {
reject(error);
}
});
});
}
};
}

function GitHubCommit(value) {
return {
id: value.sha,
@@ -117,75 +158,20 @@ function GitHubCommit(value) {
};
}

function _initCommitProps(type) {
switch (type) {
case TYPES.BITBUCKET:
return {
commits_url: 'https://api.bitbucket.org/2.0/repositories/{owner}/{repo}/commits',
token: '.bitbucket_token'
};
case TYPES.GITHUB:
return {
commits_url: 'https://api.github.com/repos/{owner}/{repo}/commits',
branches_url: 'https://api.github.com/repos/{owner}/{repo}/branches',
token: '.github_token'
};
default:
return {
commits_url: '',
branches_url: '',
token: ''
};
}
}

function _getCreds(token) {
return new Promise((resolve, reject) => {
try {
resolve(JSON.parse(fs.readFileSync(`${ process.env.HOME }/${ token }`, 'utf8')));
}
catch (error) {
reject(error);
}
});
}

function _requestFullBitBucketResponse(options, values = []) {
return new Promise((resolve, reject) => {
async(function* () {
try {
const { url, config } = options;
const response = yield requestPromise(url, config);
const chunked_values = values.concat(response.data.values);

if (response.data.next) {
resolve(_requestFullBitBucketResponse({ url: response.data.next, config }, chunked_values));
}

resolve(chunked_values);
}
catch (error) {
reject(error);
}
});
});
}
// PRIVATE

function _requestFullGitHubResponse(options, values = []) {
function _requestPagedResponse(options, next_page_func, values = []) {
return new Promise((resolve, reject) => {
async(function* () {
try {
const { url, config } = options;
const response = yield requestPromise(url, config);
const chunked_values = values.concat(response.data);

const link = response.headers.link;

if (link && link.indexOf('rel="next"') >= 0) {
const next_url = link.substring(0, link.indexOf('rel="next"'));
const next_url_formatted = next_url.trim().replace('<', '').replace('>', '').replace(';', '');
const next_page_url = next_page_func(response);

resolve(_requestFullGitHubResponse({ url: next_url_formatted, config }, chunked_values));
if (next_page_url) {
resolve(_requestPagedResponse({ url: next_page_url, config }, next_page_func, chunked_values));
}

resolve(chunked_values);
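
Taken together, the refactor leaves callers with the flow already used in main/index.js above. A minimal sketch under assumed placeholder values (repository, owner, and credentials are hypothetical; the async helper from ./helpers is assumed to resolve yielded promises, as it does elsewhere in this diff):

import { async } from './helpers';
import { Commits, TYPES } from './commits';

async(function* () {
    const commits = Commits(TYPES.GITHUB);

    // isAuthorized/authorize delegate to Auth('.github_token') under the hood.
    const isAuthorized = yield commits.isAuthorized();

    if (!isAuthorized) {
        // Placeholder credentials; the real CLI prompts for these interactively.
        yield commits.authorize('some-user', 'some-password');
    }

    // 'some-repo' and 'some-owner' are hypothetical values for illustration.
    const results = yield commits.getCommitsByRepo('some-repo', 'some-owner');

    console.log(`${ results.length } commits fetched (unique across branches for GitHub)`);
});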