OAuth server side functions (#96)
* add+update user; create/delete webhook

* create two API endpoints

one to authenticate and another to create/delete webhooks

(currently I'm sending the githubToken to the client and it sends it back when creating a webhook; I need to keep only the firebaseToken on the client side)
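
as a rough sketch of the idea (not the actual routes in this commit; paths, payload fields, require locations, and the exact userService call are placeholders):

// sketch only: paths, payload fields, and require locations are guesses
var express = require('express');
var achievibitDB = require('./achievibitDB');
var userService = require('./app/models/userService'); // assumed location

var router = express.Router();

// endpoint 1: authenticate with the firebaseToken and return the stored user
router.post('/authenticate', function(req, res) {
  userService.authenticateUsingToken(req.body.firebaseToken)
    .then(function(user) { res.json(user); })
    .catch(function(error) { res.status(401).json({ error: error }); });
});

// endpoint 2: create a webhook; eventually the githubToken should be looked up
// server-side (e.g. via achievibitDB.getAndUpdateUserData) instead of being
// sent back by the client
router.post('/webhook', function(req, res) {
  achievibitDB.createAchievibitWebhook(
    req.body.repo, req.body.githubToken, req.body.uid);
  res.sendStatus(202);
});

module.exports = router;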

* install missing dependency for firebase-admin

* Simplify our console service to create defaults (show time and location)

* change consoleService call

* change consoleService call + fix status warning + add missing code

currently written as a comment, since I also need to import the **account key** in some fancy way

* move consoleService into app/models folder

* implement api functions inside files

* add api file

* use api file inside index.js

* ignore mocha reports

* NEVER save private config file

the private config file should make it easier to save configuration to disk (DB URL, etc.)
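
for illustration, with keys taken from allAchievibitConfigNames in the new configurationService (the values here are obviously placeholders), privateConfig.json would look something like:

{
  "databaseUrl": "mongodb://localhost:27017/achievibit",
  "port": 8080,
  "testDB": false,
  "logsUsername": "some-username",
  "logsPassword": "some-password",
  "ngrokToken": "some-token"
}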

* save and require Q in our project (missed in the previous commit)

* change order in api so routes won't override other routes

* create new configuration service

this will handle all configuration (it will also save the configuration you pass as argv if you add --savePrivate)

I still need to make this simpler, but it's already simpler than the previous one :-)
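
usage-wise (the start command and values here are placeholders; get() with no name returns the whole merged config, which is how achievibitDB.js consumes it below):

// start once with the config passed as argv and persist it to privateConfig.json:
//   node index.js --databaseUrl=mongodb://localhost:27017/achievibit --savePrivate
var configService = require('./app/models/configurationService')();

var CONFIG = configService.get();             // no name: the whole merged config
var dbUrl = configService.get('databaseUrl'); // or a single value by name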

* use new configService + uncomment firebase admin initialization
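
the initialization itself, once the account key is wired in, should look roughly like this (the mapping from the firebase* config names to the service-account fields is my guess, not code from this commit):

var _ = require('lodash');
var admin = require('firebase-admin');
var CONFIG = require('./app/models/configurationService')().get();

// build the service account from config instead of importing a key file
var serviceAccount = {
  type: CONFIG.firebaseType,
  project_id: CONFIG.firebaseProjectId,
  private_key_id: CONFIG.firebasePrivateKeyId,
  private_key: CONFIG.firebasePrivateKey,
  client_email: CONFIG.firebaseClientEmail,
  client_id: CONFIG.firebaseClientId,
  auth_uri: CONFIG.firebaseAuthUri,
  token_uri: CONFIG.firebaseTokenUri,
  auth_provider_x509_cert_url: CONFIG.firebaseAPx509CU,
  client_x509_cert_url: CONFIG.firebaseCx509CU
};

// matches the later point: if not all firebase admin vars are set, don't authenticate
if (_.every(_.values(serviceAccount), function(value) { return !_.isNil(value); })) {
  admin.initializeApp({
    credential: admin.credential.cert(serviceAccount)
  });
}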

* put savePrivate inside configurationService

* remove traces of nconf in other files

* lint

* make sure `updateWith` object is defined

* get the firebase admin from userService

later, this should be replaced with a function that does this internally

* expose authenticateUsingToken instead of the defaultAuth object

* also use authenticateUsingToken internally for tests

* basic structure for userService specs

I want to change the functions inside userService to not use req and res so that it will be simpler to test.

after that change, I'll change this to test the actual file and not the function I mocked a few lines before :-)
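
the shape I'm aiming for is roughly this (a made-up illustration of the structure, not the actual userService code):

var admin = require('firebase-admin');            // assumes initializeApp() already ran
var achievibitDB = require('../../achievibitDB'); // path assumed (userService sits in app/models)

// req/res-free logic: a unit test can call this directly
function authenticate(firebaseToken) {
  return admin.auth().verifyIdToken(firebaseToken)
    .then(function(decodedToken) {
      return achievibitDB.getAndUpdateUserData(decodedToken.uid, {});
    });
}

// thin express adapter, kept separate from the logic
function authenticateRoute(req, res) {
  authenticate(req.body.firebaseToken)
    .then(function(user) { res.json(user); })
    .catch(function(error) { res.status(401).json({ error: error }); });
}

module.exports = {
  authenticate: authenticate,
  authenticateRoute: authenticateRoute
};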

* if not all firebase admin vars are set, don't authenticate

* change userService model structure to support easier tests

* write mocks to help test individual files

* add some tests for userService auth functions
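
a made-up spec in the same spirit; as mentioned a few bullets up, these still exercise a mocked function rather than the real file (mocha runs it; the assertions use node's built-in assert):

var assert = require('assert');
var Q = require('q');

describe('userService (mocked)', function() {
  // hand-rolled mock standing in for the real userService + firebase-admin
  var userService = {
    authenticate: function(firebaseToken) {
      if (!firebaseToken) { return Q.reject('expected a token'); }
      return Q.resolve({ uid: 'mock-uid' });
    }
  };

  it('rejects authentication when no token is given', function() {
    return userService.authenticate(undefined).then(function() {
      throw new Error('should not have resolved');
    }, function(error) {
      assert.strictEqual(error, 'expected a token');
    });
  });
});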

* comment out e2e and check how to fix the tests timing-out
thatkookooguy committed Sep 29, 2017
1 parent 8f12be2 commit c87b12c
Showing 19 changed files with 1,244 additions and 424 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -7,3 +7,7 @@ coverage/
.idea/

monkeyDB.json

mochawesome-report/

privateConfig.json
217 changes: 206 additions & 11 deletions achievibitDB.js
@@ -1,29 +1,29 @@
var _ = require('lodash');
var nconf = require('nconf');
nconf.argv().env();
var dbLibrary = nconf.get('testDB') ? 'monkey-js' : 'monk';
var Q = require('q');
var configService = require('./app/models/configurationService')();
var CONFIG = configService.get();
var dbLibrary = CONFIG.testDB ? 'monkey-js' : 'monk';
var monk = require(dbLibrary);
var async = require('async');
var utilities = require('./utilities');
var github = require('octonode');
var request = require('request');
var colors = require('colors');
var client = github.client({
username: nconf.get('githubUser'),
password: nconf.get('githubPassword')
username: CONFIG.githubUser,
password: CONFIG.githubPassword
});
var console = require('./consoleService')('achievibitDB', [
'cyan',
'inverse'
], process.console);
var console = require('./app/models/consoleService')();

var url = nconf.get('databaseUrl');
var url = CONFIG.databaseUrl;
var db = monk(url);
var apiUrl = 'https://api.github.com/repos/';

var collections = {
repos: db.get('repos'),
users: db.get('users')
users: db.get('users'),
// used to store additional private user data
userSettings: db.get('auth_users')
};

var achievibitDB = {};
@@ -38,6 +38,9 @@ achievibitDB.updatePartialArray = updatePartialArray;
achievibitDB.getExtraPRData = getExtraPRData;
achievibitDB.addPRItems = addPRItems;
achievibitDB.connectUsersAndRepos = connectUsersAndRepos;
achievibitDB.createAchievibitWebhook = createAchievibitWebhook;
achievibitDB.deleteAchievibitWebhook = deleteAchievibitWebhook;
achievibitDB.getAndUpdateUserData = getAndUpdateUserData;

module.exports = achievibitDB;

@@ -541,6 +544,198 @@ function getReactions(comment) {
};
}

function createAchievibitWebhook(repoName, gToken, uid) {
var githubWebhookConfig = {
name: 'web', //'achievibit',
active: true,
events: [
'pull_request',
'pull_request_review',
'pull_request_review_comment'
],
config: {
'url': 'http://achievibit.kibibit.io/',
'content_type': 'json'
}
};

var creatWebhookUrl = [
apiUrl,
repoName,
'/hooks'
].join('');
request({
method: 'POST',
url: creatWebhookUrl,
headers: {
'User-Agent': 'achievibit',
'Authorization': 'token ' + gToken
},
json: true,
body: githubWebhookConfig
}, function(err, response, body) {
if (err) {
console.error('had a problem creating a webhook for ' + repoName, err);
return;
}

if (response.statusCode === 200 || response.statusCode === 201) {
console.log('webhook added successfully');
var identityObject = {
uid: uid
};
findItem('userSettings', identityObject).then(function(savedUser) {
if (!_.isEmpty(savedUser)) {
savedUser = savedUser[0];
var newIntegrations =
_.map(savedUser.reposIntegration, function(repo) {
if (repo.name === repoName) {
repo.id = body.id;
repo.integrated = true;
}

return repo;
});
updatePartially('userSettings', identityObject, {
'reposIntegration': newIntegrations
});
}
});
} else {
console.error([
'creating webhook: ',
'wrong status from server: ',
'[', response.statusCode, ']'
].join(''), body);
}
});
}

function deleteAchievibitWebhook(repoName, gToken, uid) {
var deleteWebhookUrl = [
apiUrl,
repoName,
'/hooks'
].join('');

var identityObject = {
uid: uid
};

findItem('userSettings', identityObject).then(function(savedUser) {
if (!_.isEmpty(savedUser)) {
savedUser = savedUser[0];
var repoUserData = _.find(savedUser.reposIntegration, {
name: repoName
});

if (!repoUserData.id) {
return 'error!';
}
deleteWebhookUrl += '/' + repoUserData.id;
repoUserData.id = null;
repoUserData.integrated = false;

request({
method: 'DELETE',
url: deleteWebhookUrl,
headers: {
'User-Agent': 'achievibit',
'Authorization': 'token ' + gToken
},
json: true
}, function(err, response, body) {
if (err) {
console.error('had a problem deleting a webhook for ' + repoName,
err);
return;
}

updatePartially('userSettings', identityObject, {
'reposIntegration': savedUser.reposIntegration
});
});
} else {
return 'error!';
}
});
}

function getAndUpdateUserData(uid, updateWith) {
var deferred = Q.defer();
if (_.isNil(uid)) { deferred.reject('expected a uid'); return deferred.promise; }

// var authUsers = collections.userSettings;
var identityObject = {
uid: uid
};

updateWith = updateWith || {};

findItem('userSettings', identityObject).then(function(savedUser) {
if (!_.isEmpty(savedUser)) {
savedUser = savedUser[0];
if (!_.isEmpty(updateWith)) { // new sign in so update tokens
updatePartially('userSettings', identityObject, updateWith);
}
// we don't wait for the promise here because we already have the new data
// update if needed
savedUser.username = updateWith.username || savedUser.username;
savedUser.githubToken = updateWith.githubToken || savedUser.githubToken;
// return the updated saved user
deferred.resolve(savedUser);
} else { // this is a new user in our database
// we should have this data given from the client if it's a new user,
// but something can go wrong sometimes, so: defaults.
var newUser = {
username: updateWith.username || null,
uid: uid,
signedUpOn: Date.now(),
postAchievementsAsComments: _.isNil(updateWith.postAchievementsAsComments) ?
true : updateWith.postAchievementsAsComments,
reposIntegration: updateWith.reposIntegration || [],
timezone: updateWith.timezone || null,
githubToken: updateWith.githubToken || null
};

// get the user's repos and store them in the user object
var client = github.client(newUser.githubToken);
var ghme = client.me();

ghme.repos(function(err, repos) { // headers
if (err) deferred.reject('couldn\'t fetch repos');
else {
var parsedRepos = [];
_.forEach(repos, function(repo) {
//var escapedRepoName = _.replace(repo.full_name, /\./g, '@@@');
parsedRepos.push({
name: repo.full_name,
integrated: false
});
});
newUser.reposIntegration = parsedRepos;

// test out automatic integration with Thatkookooguy/monkey-js
// createAchievibitWebhook(_.find(repos, {
// 'full_name': 'Thatkookooguy/monkey-js'
// }), newUser.githubToken);

insertItem('userSettings', newUser);
// this is added to the db. create a copy of new user first
var returnedUser = _.clone(newUser);
returnedUser.newUser = true;

deferred.resolve(returnedUser);
}
});
}
}, function(error) {
deferred.reject('something went wrong with searching a user', error);
});

return deferred.promise;
}

function getNewFileFromPatch(patch) {
if (!patch) {
return;
57 changes: 57 additions & 0 deletions app/models/badgeService.js
@@ -0,0 +1,57 @@
var _ = require('lodash');
var badge = require('gh-badges');

var achievements = require('require-all')({
dirname: appRoot + '/achievements',
filter: /(.+achievement)\.js$/,
excludeDirs: /^\.(git|svn)$/,
recursive: true
});

var badgeService = {};

badgeService.get = function(req, res) {
badge.loadFont('./Verdana.ttf', function() {
badge(
{
text: [
'achievements',
_.keys(achievements).length
],
colorA: '#894597',
colorB: '#5d5d5d',
template: 'flat',
logo: [
'data:image/png;base64,iVBORw0KGgoAAAA',
'NSUhEUgAAACAAAAAgCAYAAABzenr0AAAABmJL',
'R0QA/wD/AP+gvaeTAAAA/0lEQVRYhe3WMU7DM',
'BjFcadqh0qdWWBl7QU4Ss/AjsREF8RdOhYO0E',
'qoN2DhFIgBOvBjIIMVxSFyUiEhP8lD7C/v/T9',
'7sEMoKkoIe+Npn8qpOgCM2VBVVa1ZkzFDcjQd',
'apDqLIR+u/jnO1AACkABKABdAO9DjHEWfb7lA',
'LwOAQghXPXx6gJ4zE3GJIRwE0095Zhc4PO3iz',
'7x7zoq+cB5bifr9tg0AK7xFZXcZYXXZjNs+wB',
'giofG8hazbIDaeI5dFwAu8dxY2mE+KDyCWGCT',
'YLj3c86xNliMEh5BVLjFseNEjnVN8pU0BsgSh',
'5bwA5YnC25AVFjhpR6rk3Zd9K/1Dcae2pUn6m',
'qiAAAAAElFTkSuQmCC'
].join('')
},
function(svg) {
res.setHeader('Content-Type', 'image/svg+xml;charset=utf-8');
res.setHeader('Pragma-directive', 'no-cache');
res.setHeader('Cache-directive', 'no-cache');
res.setHeader('Pragma','no-cache');
res.setHeader('Expires','0');
// Cache management - no cache,
// so it won't be cached by GitHub's CDN.
res.setHeader('Cache-Control',
'no-cache, no-store, must-revalidate');

res.send(svg);
}
);
});
};

module.exports = badgeService;
73 changes: 73 additions & 0 deletions app/models/configurationService.js
@@ -0,0 +1,73 @@
var _ = require('lodash');
var console = require('./consoleService')();
var nconf = require('nconf');
var auth = require('http-auth'); // @see https://github.com/gevorg/http-auth

var allAchievibitConfigNames = [
'firebaseType',
'firebaseProjectId',
'firebasePrivateKeyId',
'firebasePrivateKey',
'firebaseClientEmail',
'firebaseClientId',
'firebaseAuthUri',
'firebaseTokenUri',
'firebaseAPx509CU',
'firebaseCx509CU',
'port',
'databaseUrl',
'stealth',
'testDB',
'logsUsername',
'logsPassword',
'ngrokToken'
];

// look for config in:
nconf
.argv()
.env({whitelist: allAchievibitConfigNames})
.file({ file: 'privateConfig.json' });

var configService = function() {

var shouldSaveToFile = nconf.get('savePrivate');

if (shouldSaveToFile) {
_.forEach(allAchievibitConfigNames, function(varName) {
nconf.set(varName, nconf.get(varName));
});

nconf.save(function (err) {
if (err) {
console.error('problem saving private configuration');
} else {
console.info('PERSONAL CONFIG SAVED! DELETE WHEN FINISHED!');
}
});
}

return {
get: function(name) {
return nconf.get(name);
},
haveLogsAuth: !_.isNil(nconf.get('logsUsername')),
createLogsAuthForExpress: function() {
var basicAuth = auth.basic({
realm: 'achievibit ScribeJS WebPanel'
}, function (username, password, callback) {
var logsUsername = nconf.get('logsUsername') ?
nconf.get('logsUsername') + '' : '';

var logsPassword = nconf.get('logsPassword') ?
nconf.get('logsPassword') + '' : '';

callback(username === logsUsername && password === logsPassword);
});

return auth.connect(basicAuth);
}
};
};

module.exports = configService;
