From 6b562ab874d7e5a35c23b63413122f6d7387092d Mon Sep 17 00:00:00 2001
From: Gallagher Polyn
Date: Sat, 25 Apr 2015 17:44:34 -0400
Subject: [PATCH 1/6] changed config

---
 lib/config.js | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/lib/config.js b/lib/config.js
index ab7cd2e..98e03d3 100644
--- a/lib/config.js
+++ b/lib/config.js
@@ -3,9 +3,9 @@ module.exports = {
 
   // Services
-  mongo_url: process.env.MONGOLAB_URI || 'mongodb://localhost:27017/appDev',
-  rabbit_url: process.env.CLOUDAMQP_URL || 'amqp://localhost',
-  port: int(process.env.PORT) || 5000,
+  mongo_url: process.env.MONGOLAB_URI || 'mongodb://192.168.59.103:27017/appDev',
+  rabbit_url: process.env.CLOUDAMQP_URL || 'amqp://192.168.59.103',
+  port: 5050, //int(process.env.PORT) || 5000,
 
   // Security
   cookie_secret: process.env.COOKIE_SECRET || 'myCookieSecret',

From 232cb90af6f62c01e4e36f68cb73eaeddef709c2 Mon Sep 17 00:00:00 2001
From: Gallagher Polyn
Date: Sat, 25 Apr 2015 17:45:00 -0400
Subject: [PATCH 2/6] update to gitignore

---
 .gitignore | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitignore b/.gitignore
index c2100e6..6311765 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,4 +3,5 @@
 npm-debug.log
 /test/benchmarks
 *.env
+*.swp

From 35ef910798e4bb97f05d9992d31b5a08db39f247 Mon Sep 17 00:00:00 2001
From: Gallagher Polyn
Date: Mon, 27 Apr 2015 15:49:53 -0400
Subject: [PATCH 3/6] added nodemon + npm run dev-start uses Procfile_dev

---
 Procfile_dev     | 2 ++
 package.json     | 5 ++++-
 script/dev-start | 3 +++
 3 files changed, 9 insertions(+), 1 deletion(-)
 create mode 100644 Procfile_dev
 create mode 100755 script/dev-start

diff --git a/Procfile_dev b/Procfile_dev
new file mode 100644
index 0000000..fae6dc3
--- /dev/null
+++ b/Procfile_dev
@@ -0,0 +1,2 @@
+web: nodemon lib/server.js
+worker: nodemon lib/worker.js
diff --git a/package.json b/package.json
index 3ebbcd1..211ee5a 100644
--- a/package.json
+++ b/package.json
@@ -4,6 +4,7 @@
   "description": "A minimum viable node app for production deployment",
   "main": "index.js",
   "scripts": {
+    "dev-start": "script/dev-start",
     "start": "nf start",
     "reset": "script/reset",
     "benchmark": "script/benchmark"
@@ -33,7 +34,9 @@
     "superagent": "^0.18.2",
     "throng": "^1.0.0"
   },
-  "devDependencies": {},
+  "devDependencies": {
+    "nodemon": "^1.3.7"
+  },
   "engines": {
     "node": "0.11.x"
   }
diff --git a/script/dev-start b/script/dev-start
new file mode 100755
index 0000000..5acc205
--- /dev/null
+++ b/script/dev-start
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+nf start -j Procfile_dev

From e573595b6ca6f3e664d4beb18bd1131e25c13b05 Mon Sep 17 00:00:00 2001
From: Gallagher Polyn
Date: Tue, 28 Apr 2015 07:43:14 -0400
Subject: [PATCH 4/6] changes that seem to work locally

---
 lib/app/index.js   |  52 +--------------------
 lib/server.js      |   1 -
 lib/work/errors.js |  32 +++++++++++++
 lib/work/index.js  | 111 +++++++++++++++++++++++++++++++++++++++++++++
 lib/worker.js      |   2 +-
 5 files changed, 146 insertions(+), 52 deletions(-)
 create mode 100644 lib/work/errors.js
 create mode 100644 lib/work/index.js

diff --git a/lib/app/index.js b/lib/app/index.js
index 46ab312..2ec0f4f 100644
--- a/lib/app/index.js
+++ b/lib/app/index.js
@@ -10,6 +10,7 @@ var SCRAPE_QUEUE = 'jobs.scrape';
 var VOTE_QUEUE = 'jobs.vote';
 
 function App(config) {
+  logger.log({ type: 'info', msg: 'app/index.App' });
   EventEmitter.call(this);
 
   this.config = config;
@@ -19,6 +20,7 @@ function App(config) {
 }
 
 module.exports = function createApp(config) {
+  logger.log({ type: 'info', msg: 'app/index.createApp' });
   return new App(config);
 };
 
@@ -51,19 +53,11 @@ App.prototype.addArticle = function(userId, url) {
   return Promise.resolve(id);
 };
 
-App.prototype.scrapeArticle = function(userId, id, url) {
-  return this.Article.scrape(userId, id, url);
-};
-
 App.prototype.addUpvote = function(userId, articleId) {
   this.connections.queue.publish(VOTE_QUEUE, { userId: userId, articleId: articleId });
   return Promise.resolve(articleId);
 };
 
-App.prototype.upvoteArticle = function(userId, articleId) {
-  return this.Article.voteFor(userId, articleId);
-};
-
 App.prototype.purgePendingArticles = function() {
   logger.log({ type: 'info', msg: 'app.purgePendingArticles' });
@@ -98,48 +92,6 @@ App.prototype.listArticles = function(userId, n, fresh) {
   return this.Article.list(userId, n, fresh);
 };
 
-App.prototype.startScraping = function() {
-  this.connections.queue.handle(SCRAPE_QUEUE, this.handleScrapeJob.bind(this));
-  this.connections.queue.handle(VOTE_QUEUE, this.handleVoteJob.bind(this));
-  return this;
-};
-
-App.prototype.handleScrapeJob = function(job, ack) {
-  logger.log({ type: 'info', msg: 'handling job', queue: SCRAPE_QUEUE, url: job.url });
-
-  this
-    .scrapeArticle(job.userId, job.id, job.url)
-    .then(onSuccess, onError);
-
-  function onSuccess() {
-    logger.log({ type: 'info', msg: 'job complete', status: 'success', url: job.url });
-    ack();
-  }
-
-  function onError() {
-    logger.log({ type: 'info', msg: 'job complete', status: 'failure', url: job.url });
-    ack();
-  }
-};
-
-App.prototype.handleVoteJob = function(job, ack) {
-  logger.log({ type: 'info', msg: 'handling job', queue: VOTE_QUEUE, articleId: job.articleId });
-
-  this
-    .upvoteArticle(job.userId, job.articleId)
-    .then(onSuccess, onError);
-
-  function onSuccess() {
-    logger.log({ type: 'info', msg: 'job complete', queue: VOTE_QUEUE, status: 'success' });
-    ack();
-  }
-
-  function onError(err) {
-    logger.log({ type: 'info', msg: 'job complete', queue: VOTE_QUEUE, status: 'failure', error: err });
-    ack();
-  }
-};
-
 App.prototype.stopScraping = function() {
   this.connections.queue.ignore(SCRAPE_QUEUE);
   this.connections.queue.ignore(VOTE_QUEUE);
diff --git a/lib/server.js b/lib/server.js
index 310cde3..054cdd5 100644
--- a/lib/server.js
+++ b/lib/server.js
@@ -25,7 +25,6 @@ function start() {
   instance.on('lost', abort);
 
   function createServer() {
-    if (config.thrifty) instance.startScraping();
     var server = http.createServer(web(instance, config));
 
     process.on('SIGTERM', shutdown);
diff --git a/lib/work/errors.js b/lib/work/errors.js
new file mode 100644
index 0000000..8ac9e5f
--- /dev/null
+++ b/lib/work/errors.js
@@ -0,0 +1,32 @@
+function ArticleNotFound() {
+  Error.call(this);
+  Error.captureStackTrace(this, ArticleNotFound);
+  this.name = 'ArticleNotFound';
+  this.message = 'Article Not Found';
+}
+
+ArticleNotFound.prototype = Object.create(Error.prototype);
+
+function VoteNotAllowed() {
+  Error.call(this);
+  Error.captureStackTrace(this, VoteNotAllowed);
+  this.name = 'VoteNotAllowed';
+  this.message = 'Vote Not Allowed';
+}
+
+VoteNotAllowed.prototype = Object.create(Error.prototype);
+
+function ScrapeFailed() {
+  Error.call(this);
+  Error.captureStackTrace(this, ScrapeFailed);
+  this.name = 'ScrapeFailed';
+  this.message = 'Scrape Failed';
+}
+
+ScrapeFailed.prototype = Object.create(Error.prototype);
+
+module.exports = {
+  ArticleNotFound: ArticleNotFound,
+  VoteNotAllowed: VoteNotAllowed,
+  ScrapeFailed: ScrapeFailed
+};
diff --git a/lib/work/index.js b/lib/work/index.js
new file mode 100644
index 0000000..d37d69e
--- /dev/null
+++ b/lib/work/index.js
@@ -0,0 +1,111 @@
+var logger = require('logfmt');
+var Promise = require('promise');
+var EventEmitter = require('events').EventEmitter;
+
+var connections = require('../app/connections');
+var ArticleModel = require('../app/article-model');
+
+var SCRAPE_QUEUE = 'jobs.scrape';
+var VOTE_QUEUE = 'jobs.vote';
+
+function Work(config) {
+  logger.log({ type: 'info', msg: 'work/index.Work' });
+  EventEmitter.call(this);
+
+  this.config = config;
+  this.connections = connections(config.mongo_url, config.rabbit_url);
+  this.connections.once('ready', this.onConnected.bind(this));
+  this.connections.once('lost', this.onLost.bind(this));
+}
+
+module.exports = function createApp(config) {
+  return new Work(config);
+};
+
+Work.prototype = Object.create(EventEmitter.prototype);
+
+Work.prototype.onConnected = function() {
+  logger.log({ type: 'info', msg: 'work/index.onConnected' });
+  var queues = 0;
+  this.Article = ArticleModel(this.connections.db, this.config.mongo_cache);
+  this.connections.queue.create(SCRAPE_QUEUE, { prefetch: 5 }, onCreate.bind(this));
+  this.connections.queue.create(VOTE_QUEUE, { prefetch: 5 }, onCreate.bind(this));
+
+  function onCreate() {
+    if (++queues === 2) this.onReady();
+  }
+};
+
+Work.prototype.onReady = function() {
+  logger.log({ type: 'info', msg: 'app.ready' });
+  this.emit('ready');
+};
+
+Work.prototype.onLost = function() {
+  logger.log({ type: 'info', msg: 'app.lost' });
+  this.emit('lost');
+};
+
+Work.prototype.scrapeArticle = function(userId, id, url) {
+  logger.log({ type: 'info', msg: 'work/index.scrapeArticle' });
+  return this.Article.scrape(userId, id, url);
+};
+
+Work.prototype.upvoteArticle = function(userId, articleId) {
+  logger.log({ type: 'info', msg: 'work/index.upvoteArticle' });
+  return this.Article.voteFor(userId, articleId);
+};
+
+Work.prototype.startScraping = function() {
+  logger.log({ type: 'info', msg: 'work/index.startScraping' });
+  this.connections.queue.handle(SCRAPE_QUEUE, this.handleScrapeJob.bind(this));
+  this.connections.queue.handle(VOTE_QUEUE, this.handleVoteJob.bind(this));
+  return this;
+};
+
+// KEEP
+Work.prototype.handleScrapeJob = function(job, ack) {
+  logger.log({ type: 'info', msg: 'work/index.handleScrapeJob' });
+  logger.log({ type: 'info', msg: 'handling job', queue: SCRAPE_QUEUE, url: job.url });
+
+  this
+    .scrapeArticle(job.userId, job.id, job.url)
+    .then(onSuccess, onError);
+
+  function onSuccess() {
+    logger.log({ type: 'info', msg: 'job complete', status: 'success', url: job.url });
+    ack();
+  }
+
+  function onError() {
+    logger.log({ type: 'info', msg: 'job complete', status: 'failure', url: job.url });
+    ack();
+  }
+};
+
+// KEEP
+Work.prototype.handleVoteJob = function(job, ack) {
+  logger.log({ type: 'info', msg: 'handling job', queue: VOTE_QUEUE, articleId: job.articleId });
+
+  this
+    .upvoteArticle(job.userId, job.articleId)
+    .then(onSuccess, onError);
+
+  function onSuccess() {
+    logger.log({ type: 'info', msg: 'job complete', queue: VOTE_QUEUE, status: 'success' });
+    ack();
+  }
+
+  function onError(err) {
+    logger.log({ type: 'info', msg: 'job complete', queue: VOTE_QUEUE, status: 'failure', error: err });
+    ack();
+  }
+};
+
+// KEEP
+Work.prototype.stopScraping = function() {
+  this.connections.queue.ignore(SCRAPE_QUEUE);
+  this.connections.queue.ignore(VOTE_QUEUE);
+  return this;
+};
+
diff --git a/lib/worker.js b/lib/worker.js
index 25e100f..9532595 100644
--- a/lib/worker.js
+++ b/lib/worker.js
@@ -3,7 +3,7 @@ var logger = require('logfmt');
 var throng = require('throng');
 
 var config = require('./config');
-var app = require('./app');
+var app = require('./work');
 
 http.globalAgent.maxSockets = Infinity;
 throng(start, { workers: config.worker_concurrency });

From dfb46e0e39c1c497e817b1944dafe7a3ae74e011 Mon Sep 17 00:00:00 2001
From: Gallagher Polyn
Date: Tue, 28 Apr 2015 13:36:49 -0400
Subject: [PATCH 5/6] revert to original port config

---
 lib/config.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/config.js b/lib/config.js
index 98e03d3..6ad8bcf 100644
--- a/lib/config.js
+++ b/lib/config.js
@@ -5,7 +5,7 @@ module.exports = {
   // Services
   mongo_url: process.env.MONGOLAB_URI || 'mongodb://192.168.59.103:27017/appDev',
   rabbit_url: process.env.CLOUDAMQP_URL || 'amqp://192.168.59.103',
-  port: 5050, //int(process.env.PORT) || 5000,
+  port: int(process.env.PORT) || 5000,
 
   // Security
   cookie_secret: process.env.COOKIE_SECRET || 'myCookieSecret',

From bd772ca2649eb70baf0d54bf266751c0e2391cca Mon Sep 17 00:00:00 2001
From: Gallagher Polyn
Date: Tue, 28 Apr 2015 13:41:14 -0400
Subject: [PATCH 6/6] mongolab is mentioned in config not mongohq

---
 readme.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/readme.md b/readme.md
index 0174b69..c446f24 100644
--- a/readme.md
+++ b/readme.md
@@ -39,7 +39,7 @@ cd node-articles-nlp
 
 heroku create
 
-heroku addons:add mongohq
+heroku addons:add mongolab
 heroku addons:add cloudamqp
 heroku config:set NODE_ENV=production