diff --git a/app/controllers/posts.js b/app/controllers/posts.js index 13a60ec..479b970 100644 --- a/app/controllers/posts.js +++ b/app/controllers/posts.js @@ -4,12 +4,18 @@ var mongoose = require('mongoose'), var md5 = require('MD5'); var fs = require('fs'); - +var markdown = require('markdown').markdown; var log = require('log4node'); exports.index = function(req, res) { + var criteria = {}; + var page = req.param('page'); + if (page) { + criteria = { pages: page }; + } + log.info('posts.index'); - var query = Post.find() + var query = Post.find(criteria) .populate('author', 'name') .sort('-createdOn') .exec(function(err, results) { @@ -32,19 +38,64 @@ exports.get = function(req, res, next) { }); }; +function renderHtml(input) { + if (input) { + try { + return markdown.toHTML(input); + } catch (err) { + console.log('Failed to render html', err); + return input; + } + } else { + return input; + } +} + +function cleanTags(tags) { + if (!tags) { + return []; + } + + if (!Array.isArray(tags)) { + tags = [tags]; + } + + var results = []; + + for (var i = 0; i < tags.length; i++) { + var tag = tags[i].toString() + .replace(/^#/, '') + .replace(/(\W|\d)/g, '$1 ') + .replace(/\b\w+/g, function(txt) { + return txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase(); + }) + .replace(/\s/g, '') + + if (tag) { + results.push(tag); + } + } + + return results; +} + exports.create = function(req, res, next) { log.info('posts.create %j', req.body); var post = new Post({ title: req.body.title, preview: req.body.preview, + previewHtml: renderHtml(req.body.preview), details: req.body.details, + detailsHtml: renderHtml(req.body.details), image: req.body.image, gallery: req.body.gallery, status: req.body.status, createdOn: req.body.createdOn, postedOn: req.body.postedOn, - author: req.user + author: req.user, + pages: req.body.pages, + tags: cleanTags(req.body.tags), }); return post.save(function(err) { @@ -60,13 +111,17 @@ exports.update = function(req, res, next) { console.log('updating post'); return Post.findById(id, function(err, post) { - post.title = req.body.title; - post.preview = req.body.preview; - post.details = req.body.details; - post.image = req.body.image; - post.gallery = req.body.gallery; - post.status = req.body.status; - post.postedOn = req.body.postedOn; + post.title = req.body.title; + post.preview = req.body.preview; + post.previewHtml = renderHtml(req.body.preview); + post.details = req.body.details; + post.detailsHtml = renderHtml(req.body.details); + post.image = req.body.image; + post.gallery = req.body.gallery; + post.status = req.body.status; + post.postedOn = req.body.postedOn; + post.pages = req.body.pages; + post.tags = cleanTags(req.body.tags); return post.save(function(err) { if (err) diff --git a/app/controllers/workorders.js b/app/controllers/workorders.js index 7bb9312..f40f1ed 100644 --- a/app/controllers/workorders.js +++ b/app/controllers/workorders.js @@ -130,18 +130,47 @@ module.exports = function(config, calendar) { if (!notify) return callback(null); - var to = generateToLine(techs).concat(req.body.emails); - if (!to) - return callback(null); + var description = generateDescription(client, workorder, req.user, null, techs); + var techDescription = appendNotes(description, client); - server.send({ - text: generateDescription(client, workorder, req.user, null, techs), - from: config.email.user, - to: to, - subject: 'Workorder created: ' + workorder.biomedId - }, function(err, message) { - callback(err); - }); + var to = req.body.emails; + var techTo = 
generateToLine(techs); + + var subject = 'Workorder created: ' + workorder.biomedId; + + console.log('-------------------------'); + console.log(to); + + async.waterfall([ + function(cb) { + if (to && to.length > 0) { + var msg = { + text: description, + from: config.email.user, + to: to, + subject: subject + }; + console.log(msg); + server.send(msg, function(err, message) { cb(err); }); + } else { + cb(); + } + }, + function(cb) { + if (techTo) { + var msg = { + text: techDescription, + from: config.email.user, + to: techTo, + subject: subject + }; + console.log(msg); + server.send(msg, function(err, message) { cb(err); }); + } else { + cb(); + } + } + ], callback); }, function(callback) { workorder.save(function(err, result) { callback(err, result); }); @@ -265,18 +294,45 @@ module.exports = function(config, calendar) { if (!notify) return callback(null); - var to = generateToLine(techs); - if (!to) - return callback(null); - - server.send({ - text: generateDescription(client, workorder, createdBy, modifiedBy, techs), - from: config.email.user, - to: to, - subject: 'Workorder updated: ' + workorder.biomedId - }, function(err, message) { - callback(err); - }); + + var description = generateDescription(client, workorder, createdBy, modifiedBy, techs); + var techDescription = appendNotes(description, client); + + var to = req.body.emails; + var techTo = generateToLine(techs); + + var subject = 'Workorder updated: ' + workorder.biomedId; + + async.waterfall([ + function(cb) { + if (to && to.length > 0) { + var msg = { + text: description, + from: config.email.user, + to: to, + subject: subject + }; + console.log(msg); + server.send(msg, function(err, message) { cb(err); }); + } else { + cb(); + } + }, + function(cb) { + if (techTo) { + var msg = { + text: techDescription, + from: config.email.user, + to: techTo, + subject: subject + }; + console.log(msg); + server.send(msg, function(err, message) { cb(err); }); + } else { + cb(); + } + } + ], callback); }, function(callback) { workorder.save(function(err) { @@ -332,6 +388,26 @@ function generateLocation(client) { return sprintf("%(street1)s %(street2)s %(city)s, %(state)s. 
%(zip)s", data); } +function appendNotes(message, client) { + var template = + "%(message)s\n" + + "Tech Notes:\n" + + " %(notes)s\n" + + "\n"; + + if (client.notes && client.notes['tech']) { + var resources = { + message: message || '', + notes: client.notes['tech'] || '' + }; + + return sprintf(template, resources); + } else { + return message; + } + +} + function generateDescription(client, workorder, createdBy, modifiedBy) { var template = "Workorder ID:\n" + diff --git a/app/model/posts.js b/app/model/posts.js index 17124f4..b0a8bfb 100644 --- a/app/model/posts.js +++ b/app/model/posts.js @@ -5,13 +5,17 @@ var mongoose = require('mongoose'), var postSchema = new Schema({ title: { type: String }, preview: { type: String }, + previewHtml: { type: String }, details: { type: String }, + detailsHtml: { type: String }, image: { type: String }, gallery: [{ type: String }], status: { type: String }, createdOn: { type: Date }, postedOn: { type: Date }, - author: { type: ObjectId, ref: 'User' } + author: { type: ObjectId, ref: 'User' }, + tags: [{ type: String }], + pages: [{ type: String }] }); var Post = module.exports = mongoose.model('Post', postSchema); diff --git a/app/views/index.jade b/app/views/index.jade index c387156..163f09e 100644 --- a/app/views/index.jade +++ b/app/views/index.jade @@ -63,6 +63,8 @@ html(lang="en", ng-app="biomed", ng-controller="biomed.PageCtrl") a(href='/admin') i.icon-wrench | Admin + li.day-of-year + {{dayOfYear}} .container-fluid ng-view !{js} diff --git a/app/views/site.jade b/app/views/site.jade index d69f41c..f662aee 100644 --- a/app/views/site.jade +++ b/app/views/site.jade @@ -38,7 +38,7 @@ html(lang="en", ng-app="site", ng-controller="site.PageCtrl") .controls .dropzone(dropzone='titleImageOptions') - .control-group + .control-group(ng-show='model.image') label.control-label Gallery .controls .dropzone(dropzone='galleryImageOptions') diff --git a/config/express.js b/config/express.js index ce97ae7..b8261b8 100644 --- a/config/express.js +++ b/config/express.js @@ -1,4 +1,5 @@ var express = require('express'); +var ClusterStore = require('strong-cluster-connect-store')(express.session); module.exports = function(app, config, passport, piler) { app.set('showStackError', true); @@ -16,9 +17,7 @@ module.exports = function(app, config, passport, piler) { app.use(express.bodyParser()); app.use(express.methodOverride()); - app.use(express.session({ - secret: 'atlantic_biomed_server_secret' - })); + app.use(express.session({ store: new ClusterStore(), secret: 'atlantic_biomed_server_secret' })); // use passport session app.use(passport.initialize()); diff --git a/launcher.js b/launcher.js new file mode 100644 index 0000000..0b1f2d6 --- /dev/null +++ b/launcher.js @@ -0,0 +1,22 @@ +var cluster = require('cluster'); + +require('strong-cluster-connect-store').setup(); + +cluster.setupMaster({ + exec: 'server.js' +}); + +var cpus = 1; +for (var i = 0; i < cpus; i++) { + cluster.fork(); +} + +cluster.on('online', function(worker) { + console.log('worker ' + worker.process.pid + ' started.'); +}); + +cluster.on('exit', function(worker, code, signal) { + console.log('worker ' + worker.process.pid + ' died'); + cluster.fork(); +}); + diff --git a/node_modules/.bin/md2html b/node_modules/.bin/md2html new file mode 120000 index 0000000..ebcab9a --- /dev/null +++ b/node_modules/.bin/md2html @@ -0,0 +1 @@ +../markdown/bin/md2html.js \ No newline at end of file diff --git a/node_modules/markdown/.npmignore b/node_modules/markdown/.npmignore new file mode 100644 index 
0000000..0ec9053 --- /dev/null +++ b/node_modules/markdown/.npmignore @@ -0,0 +1,2 @@ +.seed.yml +test diff --git a/node_modules/markdown/.travis.yml b/node_modules/markdown/.travis.yml new file mode 100644 index 0000000..90cc39d --- /dev/null +++ b/node_modules/markdown/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +node_js: + - "0.6" + - "0.8" + - "0.9" + - "0.10" + - "0.11" diff --git a/node_modules/markdown/Changes.markdown b/node_modules/markdown/Changes.markdown new file mode 100644 index 0000000..9dae3b1 --- /dev/null +++ b/node_modules/markdown/Changes.markdown @@ -0,0 +1,35 @@ +# Changelog for markdown + +## v0.5.0 - 2013-07-26 + +There might be other bug fixes then the ones listed - I've been a bit lax at +updating the changes file, sorry :( + +- Fix 'undefined' appearing in output for some cases with blockquotes +- Fix (multiple) global variable leaks. Ooops +- Fix IE8 issues (#68, #74, #97) +- Fix IE8 issue (#86) +- Handle windows line endings (#58) +- Allow spaces in img/link paths (#48) +- Add explicit text of the license to the readme (#74) +- Style tweaks by Xhmikosr (#83, #81, #82) +- Build now tested by TravisCI thanks to sebs (#85) +- Fix 'cuddled' header parsing (#94) +- Fix images inside links mistakenly requiring a title attribute to parse + correctly (#71) + + +## v0.4.0 - 2012-06-09 + +- Fix for anchors enclosed by parenthesis (issue #46) +- `npm test` will now run the entire test suite cleanly. (switch tests over to + node-tap). (#21) +- Allow inline elements to appear inside link text (#27) +- Improve link parsing when link is inside parenthesis (#38) +- Actually render image references (#36) +- Improve link parsing when multiple on a line (#5) +- Make it work in IE7/8 (#37) +- Fix blockquote merging/implicit conversion between string/String (#44, #24) +- md2html can now process stdin (#43) +- Fix jslint warnings (#42) +- Fix to correctly render self-closing tags (#40, #35, #28) diff --git a/node_modules/markdown/README.markdown b/node_modules/markdown/README.markdown new file mode 100644 index 0000000..d3f231e --- /dev/null +++ b/node_modules/markdown/README.markdown @@ -0,0 +1,185 @@ +# markdown-js + +Yet another markdown parser, this time for JavaScript. There's a few +options that precede this project but they all treat markdown to HTML +conversion as a single step process. You pass markdown in and get HTML +out, end of story. We had some pretty particular views on how the +process should actually look, which include: + + * producing well-formed HTML. This means that `em` and `strong` nesting + is important, as is the ability to output as both HTML and XHTML + + * having an intermediate representation to allow processing of parsed + data (we in fact have two, both [JsonML]: a markdown tree and an HTML tree) + + * being easily extensible to add new dialects without having to + rewrite the entire parsing mechanics + + * having a good test suite. The only test suites we could find tested + massive blocks of input, and passing depended on outputting the HTML + with exactly the same whitespace as the original implementation + +[JsonML]: http://jsonml.org/ "JSON Markup Language" + +## Installation + +Just the `markdown` library: + + npm install markdown + +Optionally, install `md2html` into your path + + npm install -g markdown + +## Usage + +### Node + +The simple way to use it with node is: + +```js +var markdown = require( "markdown" ).markdown; +console.log( markdown.toHTML( "Hello *World*!" 
) ); +``` + +### Browser + +It also works in a browser; here is a complete example: + +```html + + + + +
+ + + + +``` + +### Command line + +Assuming you've installed the `md2html` script (see Installation, +above), you can convert markdown to html: + +```bash +# read from a file +md2html /path/to/doc.md > /path/to/doc.html + +# or from stdin +echo 'Hello *World*!' | md2html +``` + +### More options + +If you want more control check out the documentation in +[lib/markdown.js] which details all the methods and parameters +available (including examples!). One day we'll get the docs generated +and hosted somewhere for nicer browsing. + +[lib/markdown.js]: http://github.com/evilstreak/markdown-js/blob/master/lib/markdown.js + +Meanwhile, here's an example of using the multi-step processing to +make wiki-style linking work by filling in missing link references: + +```js +var md = require( "markdown" ).markdown, + text = "[Markdown] is a simple text-based [markup language]\n" + + "created by [John Gruber]\n\n" + + "[John Gruber]: http://daringfireball.net"; + +// parse the markdown into a tree and grab the link references +var tree = md.parse( text ), + refs = tree[ 1 ].references; + +// iterate through the tree finding link references +( function find_link_refs( jsonml ) { + if ( jsonml[ 0 ] === "link_ref" ) { + var ref = jsonml[ 1 ].ref; + + // if there's no reference, define a wiki link + if ( !refs[ ref ] ) { + refs[ ref ] = { + href: "http://en.wikipedia.org/wiki/" + ref.replace(/\s+/, "_" ) + }; + } + } + else if ( Array.isArray( jsonml[ 1 ] ) ) { + jsonml[ 1 ].forEach( find_link_refs ); + } + else if ( Array.isArray( jsonml[ 2 ] ) ) { + jsonml[ 2 ].forEach( find_link_refs ); + } +} )( tree ); + +// convert the tree into html +var html = md.renderJsonML( md.toHTMLTree( tree ) ); +console.log( html ); +``` + +## Intermediate Representation + +Internally the process to convert a chunk of markdown into a chunk of +HTML has three steps: + + 1. Parse the markdown into a JsonML tree. Any references found in the + parsing are stored in the attribute hash of the root node under the + key `references`. + + 2. Convert the markdown tree into an HTML tree. Rename any nodes that + need it (`bulletlist` to `ul` for example) and lookup any references + used by links or images. Remove the references attribute once done. + + 3. Stringify the HTML tree being careful not to wreck whitespace where + whitespace is important (surrounding inline elements for example). + +Each step of this process can be called individually if you need to do +some processing or modification of the data at an intermediate stage. +For example, you may want to grab a list of all URLs linked to in the +document before rendering it to HTML which you could do by recursing +through the HTML tree looking for `a` nodes. + +## Running tests + +To run the tests under node you will need tap installed (it's listed as a +`devDependencies` so `npm install` from the checkout should be enough), then do + + $ npm test + +## Contributing + +Do the usual github fork and pull request dance. Add yourself to the +contributors section of [package.json](/package.json) too if you want to. + +## License + +Released under the MIT license. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/markdown/bin/md2html.js b/node_modules/markdown/bin/md2html.js new file mode 100755 index 0000000..f8ce1dd --- /dev/null +++ b/node_modules/markdown/bin/md2html.js @@ -0,0 +1,52 @@ +#!/usr/bin/env node +(function () { + "use strict"; + + var fs = require("fs") + , markdown = require("markdown").markdown + , nopt = require("nopt") + , stream + , opts + , buffer = "" + ; + + opts = nopt( + { "dialect": [ "Gruber", "Maruku"] + , "help": Boolean + } + ); + + if (opts.help) { + var name = process.argv[1].split("/").pop() + console.warn( require("util").format( + "usage: %s [--dialect=DIALECT] FILE\n\nValid dialects are Gruber (the default) or Maruku", + name + ) ); + process.exit(0); + } + + var fullpath = opts.argv.remain[0]; + + if (fullpath && fullpath !== "-") { + stream = fs.createReadStream(fullpath); + } else { + stream = process.stdin; + } + stream.resume(); + stream.setEncoding("utf8"); + + stream.on("error", function(error) { + console.error(error.toString()); + process.exit(1); + }); + + stream.on("data", function(data) { + buffer += data; + }); + + stream.on("end", function() { + var html = markdown.toHTML(buffer, opts.dialect); + console.log(html); + }); + +}()) diff --git a/node_modules/markdown/lib/index.js b/node_modules/markdown/lib/index.js new file mode 100644 index 0000000..8bb0873 --- /dev/null +++ b/node_modules/markdown/lib/index.js @@ -0,0 +1,3 @@ +// super simple module for the most common nodejs use case. +exports.markdown = require("./markdown"); +exports.parse = exports.markdown.toHTML; diff --git a/node_modules/markdown/lib/markdown.js b/node_modules/markdown/lib/markdown.js new file mode 100644 index 0000000..d365cfa --- /dev/null +++ b/node_modules/markdown/lib/markdown.js @@ -0,0 +1,1725 @@ +// Released under MIT license +// Copyright (c) 2009-2010 Dominic Baggott +// Copyright (c) 2009-2010 Ash Berlin +// Copyright (c) 2011 Christoph Dorn (http://www.christophdorn.com) + +/*jshint browser:true, devel:true */ + +(function( expose ) { + +/** + * class Markdown + * + * Markdown processing in Javascript done right. We have very particular views + * on what constitutes 'right' which include: + * + * - produces well-formed HTML (this means that em and strong nesting is + * important) + * + * - has an intermediate representation to allow processing of parsed data (We + * in fact have two, both as [JsonML]: a markdown tree and an HTML tree). 
+ * + * - is easily extensible to add new dialects without having to rewrite the + * entire parsing mechanics + * + * - has a good test suite + * + * This implementation fulfills all of these (except that the test suite could + * do with expanding to automatically run all the fixtures from other Markdown + * implementations.) + * + * ##### Intermediate Representation + * + * *TODO* Talk about this :) Its JsonML, but document the node names we use. + * + * [JsonML]: http://jsonml.org/ "JSON Markup Language" + **/ +var Markdown = expose.Markdown = function(dialect) { + switch (typeof dialect) { + case "undefined": + this.dialect = Markdown.dialects.Gruber; + break; + case "object": + this.dialect = dialect; + break; + default: + if ( dialect in Markdown.dialects ) { + this.dialect = Markdown.dialects[dialect]; + } + else { + throw new Error("Unknown Markdown dialect '" + String(dialect) + "'"); + } + break; + } + this.em_state = []; + this.strong_state = []; + this.debug_indent = ""; +}; + +/** + * parse( markdown, [dialect] ) -> JsonML + * - markdown (String): markdown string to parse + * - dialect (String | Dialect): the dialect to use, defaults to gruber + * + * Parse `markdown` and return a markdown document as a Markdown.JsonML tree. + **/ +expose.parse = function( source, dialect ) { + // dialect will default if undefined + var md = new Markdown( dialect ); + return md.toTree( source ); +}; + +/** + * toHTML( markdown, [dialect] ) -> String + * toHTML( md_tree ) -> String + * - markdown (String): markdown string to parse + * - md_tree (Markdown.JsonML): parsed markdown tree + * + * Take markdown (either as a string or as a JsonML tree) and run it through + * [[toHTMLTree]] then turn it into a well-formated HTML fragment. + **/ +expose.toHTML = function toHTML( source , dialect , options ) { + var input = expose.toHTMLTree( source , dialect , options ); + + return expose.renderJsonML( input ); +}; + +/** + * toHTMLTree( markdown, [dialect] ) -> JsonML + * toHTMLTree( md_tree ) -> JsonML + * - markdown (String): markdown string to parse + * - dialect (String | Dialect): the dialect to use, defaults to gruber + * - md_tree (Markdown.JsonML): parsed markdown tree + * + * Turn markdown into HTML, represented as a JsonML tree. If a string is given + * to this function, it is first parsed into a markdown tree by calling + * [[parse]]. + **/ +expose.toHTMLTree = function toHTMLTree( input, dialect , options ) { + // convert string input to an MD tree + if ( typeof input ==="string" ) input = this.parse( input, dialect ); + + // Now convert the MD tree to an HTML tree + + // remove references from the tree + var attrs = extract_attr( input ), + refs = {}; + + if ( attrs && attrs.references ) { + refs = attrs.references; + } + + var html = convert_tree_to_html( input, refs , options ); + merge_text_nodes( html ); + return html; +}; + +// For Spidermonkey based engines +function mk_block_toSource() { + return "Markdown.mk_block( " + + uneval(this.toString()) + + ", " + + uneval(this.trailing) + + ", " + + uneval(this.lineNumber) + + " )"; +} + +// node +function mk_block_inspect() { + var util = require("util"); + return "Markdown.mk_block( " + + util.inspect(this.toString()) + + ", " + + util.inspect(this.trailing) + + ", " + + util.inspect(this.lineNumber) + + " )"; + +} + +var mk_block = Markdown.mk_block = function(block, trail, line) { + // Be helpful for default case in tests. 
+ if ( arguments.length == 1 ) trail = "\n\n"; + + var s = new String(block); + s.trailing = trail; + // To make it clear its not just a string + s.inspect = mk_block_inspect; + s.toSource = mk_block_toSource; + + if ( line != undefined ) + s.lineNumber = line; + + return s; +}; + +function count_lines( str ) { + var n = 0, i = -1; + while ( ( i = str.indexOf("\n", i + 1) ) !== -1 ) n++; + return n; +} + +// Internal - split source into rough blocks +Markdown.prototype.split_blocks = function splitBlocks( input, startLine ) { + input = input.replace(/(\r\n|\n|\r)/g, "\n"); + // [\s\S] matches _anything_ (newline or space) + // [^] is equivalent but doesn't work in IEs. + var re = /([\s\S]+?)($|\n#|\n(?:\s*\n|$)+)/g, + blocks = [], + m; + + var line_no = 1; + + if ( ( m = /^(\s*\n)/.exec(input) ) != null ) { + // skip (but count) leading blank lines + line_no += count_lines( m[0] ); + re.lastIndex = m[0].length; + } + + while ( ( m = re.exec(input) ) !== null ) { + if (m[2] == "\n#") { + m[2] = "\n"; + re.lastIndex--; + } + blocks.push( mk_block( m[1], m[2], line_no ) ); + line_no += count_lines( m[0] ); + } + + return blocks; +}; + +/** + * Markdown#processBlock( block, next ) -> undefined | [ JsonML, ... ] + * - block (String): the block to process + * - next (Array): the following blocks + * + * Process `block` and return an array of JsonML nodes representing `block`. + * + * It does this by asking each block level function in the dialect to process + * the block until one can. Succesful handling is indicated by returning an + * array (with zero or more JsonML nodes), failure by a false value. + * + * Blocks handlers are responsible for calling [[Markdown#processInline]] + * themselves as appropriate. + * + * If the blocks were split incorrectly or adjacent blocks need collapsing you + * can adjust `next` in place using shift/splice etc. + * + * If any of this default behaviour is not right for the dialect, you can + * define a `__call__` method on the dialect that will get invoked to handle + * the block processing. + */ +Markdown.prototype.processBlock = function processBlock( block, next ) { + var cbs = this.dialect.block, + ord = cbs.__order__; + + if ( "__call__" in cbs ) { + return cbs.__call__.call(this, block, next); + } + + for ( var i = 0; i < ord.length; i++ ) { + //D:this.debug( "Testing", ord[i] ); + var res = cbs[ ord[i] ].call( this, block, next ); + if ( res ) { + //D:this.debug(" matched"); + if ( !isArray(res) || ( res.length > 0 && !( isArray(res[0]) ) ) ) + this.debug(ord[i], "didn't return a proper array"); + //D:this.debug( "" ); + return res; + } + } + + // Uhoh! no match! Should we throw an error? + return []; +}; + +Markdown.prototype.processInline = function processInline( block ) { + return this.dialect.inline.__call__.call( this, String( block ) ); +}; + +/** + * Markdown#toTree( source ) -> JsonML + * - source (String): markdown source to parse + * + * Parse `source` into a JsonML tree representing the markdown document. + **/ +// custom_tree means set this.tree to `custom_tree` and restore old value on return +Markdown.prototype.toTree = function toTree( source, custom_root ) { + var blocks = source instanceof Array ? 
source : this.split_blocks( source ); + + // Make tree a member variable so its easier to mess with in extensions + var old_tree = this.tree; + try { + this.tree = custom_root || this.tree || [ "markdown" ]; + + blocks: + while ( blocks.length ) { + var b = this.processBlock( blocks.shift(), blocks ); + + // Reference blocks and the like won't return any content + if ( !b.length ) continue blocks; + + this.tree.push.apply( this.tree, b ); + } + return this.tree; + } + finally { + if ( custom_root ) { + this.tree = old_tree; + } + } +}; + +// Noop by default +Markdown.prototype.debug = function () { + var args = Array.prototype.slice.call( arguments); + args.unshift(this.debug_indent); + if ( typeof print !== "undefined" ) + print.apply( print, args ); + if ( typeof console !== "undefined" && typeof console.log !== "undefined" ) + console.log.apply( null, args ); +} + +Markdown.prototype.loop_re_over_block = function( re, block, cb ) { + // Dont use /g regexps with this + var m, + b = block.valueOf(); + + while ( b.length && (m = re.exec(b) ) != null ) { + b = b.substr( m[0].length ); + cb.call(this, m); + } + return b; +}; + +/** + * Markdown.dialects + * + * Namespace of built-in dialects. + **/ +Markdown.dialects = {}; + +/** + * Markdown.dialects.Gruber + * + * The default dialect that follows the rules set out by John Gruber's + * markdown.pl as closely as possible. Well actually we follow the behaviour of + * that script which in some places is not exactly what the syntax web page + * says. + **/ +Markdown.dialects.Gruber = { + block: { + atxHeader: function atxHeader( block, next ) { + var m = block.match( /^(#{1,6})\s*(.*?)\s*#*\s*(?:\n|$)/ ); + + if ( !m ) return undefined; + + var header = [ "header", { level: m[ 1 ].length } ]; + Array.prototype.push.apply(header, this.processInline(m[ 2 ])); + + if ( m[0].length < block.length ) + next.unshift( mk_block( block.substr( m[0].length ), block.trailing, block.lineNumber + 2 ) ); + + return [ header ]; + }, + + setextHeader: function setextHeader( block, next ) { + var m = block.match( /^(.*)\n([-=])\2\2+(?:\n|$)/ ); + + if ( !m ) return undefined; + + var level = ( m[ 2 ] === "=" ) ? 1 : 2; + var header = [ "header", { level : level }, m[ 1 ] ]; + + if ( m[0].length < block.length ) + next.unshift( mk_block( block.substr( m[0].length ), block.trailing, block.lineNumber + 2 ) ); + + return [ header ]; + }, + + code: function code( block, next ) { + // | Foo + // |bar + // should be a code block followed by a paragraph. Fun + // + // There might also be adjacent code block to merge. + + var ret = [], + re = /^(?: {0,3}\t| {4})(.*)\n?/, + lines; + + // 4 spaces + content + if ( !block.match( re ) ) return undefined; + + block_search: + do { + // Now pull out the rest of the lines + var b = this.loop_re_over_block( + re, block.valueOf(), function( m ) { ret.push( m[1] ); } ); + + if ( b.length ) { + // Case alluded to in first comment. 
push it back on as a new block + next.unshift( mk_block(b, block.trailing) ); + break block_search; + } + else if ( next.length ) { + // Check the next block - it might be code too + if ( !next[0].match( re ) ) break block_search; + + // Pull how how many blanks lines follow - minus two to account for .join + ret.push ( block.trailing.replace(/[^\n]/g, "").substring(2) ); + + block = next.shift(); + } + else { + break block_search; + } + } while ( true ); + + return [ [ "code_block", ret.join("\n") ] ]; + }, + + horizRule: function horizRule( block, next ) { + // this needs to find any hr in the block to handle abutting blocks + var m = block.match( /^(?:([\s\S]*?)\n)?[ \t]*([-_*])(?:[ \t]*\2){2,}[ \t]*(?:\n([\s\S]*))?$/ ); + + if ( !m ) { + return undefined; + } + + var jsonml = [ [ "hr" ] ]; + + // if there's a leading abutting block, process it + if ( m[ 1 ] ) { + jsonml.unshift.apply( jsonml, this.processBlock( m[ 1 ], [] ) ); + } + + // if there's a trailing abutting block, stick it into next + if ( m[ 3 ] ) { + next.unshift( mk_block( m[ 3 ] ) ); + } + + return jsonml; + }, + + // There are two types of lists. Tight and loose. Tight lists have no whitespace + // between the items (and result in text just in the
<li>) and loose lists, + // which have an empty line between list items, resulting in (one or more) + // paragraphs inside the
  • . + // + // There are all sorts weird edge cases about the original markdown.pl's + // handling of lists: + // + // * Nested lists are supposed to be indented by four chars per level. But + // if they aren't, you can get a nested list by indenting by less than + // four so long as the indent doesn't match an indent of an existing list + // item in the 'nest stack'. + // + // * The type of the list (bullet or number) is controlled just by the + // first item at the indent. Subsequent changes are ignored unless they + // are for nested lists + // + lists: (function( ) { + // Use a closure to hide a few variables. + var any_list = "[*+-]|\\d+\\.", + bullet_list = /[*+-]/, + number_list = /\d+\./, + // Capture leading indent as it matters for determining nested lists. + is_list_re = new RegExp( "^( {0,3})(" + any_list + ")[ \t]+" ), + indent_re = "(?: {0,3}\\t| {4})"; + + // TODO: Cache this regexp for certain depths. + // Create a regexp suitable for matching an li for a given stack depth + function regex_for_depth( depth ) { + + return new RegExp( + // m[1] = indent, m[2] = list_type + "(?:^(" + indent_re + "{0," + depth + "} {0,3})(" + any_list + ")\\s+)|" + + // m[3] = cont + "(^" + indent_re + "{0," + (depth-1) + "}[ ]{0,4})" + ); + } + function expand_tab( input ) { + return input.replace( / {0,3}\t/g, " " ); + } + + // Add inline content `inline` to `li`. inline comes from processInline + // so is an array of content + function add(li, loose, inline, nl) { + if ( loose ) { + li.push( [ "para" ].concat(inline) ); + return; + } + // Hmmm, should this be any block level element or just paras? + var add_to = li[li.length -1] instanceof Array && li[li.length - 1][0] == "para" + ? li[li.length -1] + : li; + + // If there is already some content in this list, add the new line in + if ( nl && li.length > 1 ) inline.unshift(nl); + + for ( var i = 0; i < inline.length; i++ ) { + var what = inline[i], + is_str = typeof what == "string"; + if ( is_str && add_to.length > 1 && typeof add_to[add_to.length-1] == "string" ) { + add_to[ add_to.length-1 ] += what; + } + else { + add_to.push( what ); + } + } + } + + // contained means have an indent greater than the current one. On + // *every* line in the block + function get_contained_blocks( depth, blocks ) { + + var re = new RegExp( "^(" + indent_re + "{" + depth + "}.*?\\n?)*$" ), + replace = new RegExp("^" + indent_re + "{" + depth + "}", "gm"), + ret = []; + + while ( blocks.length > 0 ) { + if ( re.exec( blocks[0] ) ) { + var b = blocks.shift(), + // Now remove that indent + x = b.replace( replace, ""); + + ret.push( mk_block( x, b.trailing, b.lineNumber ) ); + } + else { + break; + } + } + return ret; + } + + // passed to stack.forEach to turn list items up the stack into paras + function paragraphify(s, i, stack) { + var list = s.list; + var last_li = list[list.length-1]; + + if ( last_li[1] instanceof Array && last_li[1][0] == "para" ) { + return; + } + if ( i + 1 == stack.length ) { + // Last stack frame + // Keep the same array, but replace the contents + last_li.push( ["para"].concat( last_li.splice(1, last_li.length - 1) ) ); + } + else { + var sublist = last_li.pop(); + last_li.push( ["para"].concat( last_li.splice(1, last_li.length - 1) ), sublist ); + } + } + + // The matcher function + return function( block, next ) { + var m = block.match( is_list_re ); + if ( !m ) return undefined; + + function make_list( m ) { + var list = bullet_list.exec( m[2] ) + ? 
["bulletlist"] + : ["numberlist"]; + + stack.push( { list: list, indent: m[1] } ); + return list; + } + + + var stack = [], // Stack of lists for nesting. + list = make_list( m ), + last_li, + loose = false, + ret = [ stack[0].list ], + i; + + // Loop to search over block looking for inner block elements and loose lists + loose_search: + while ( true ) { + // Split into lines preserving new lines at end of line + var lines = block.split( /(?=\n)/ ); + + // We have to grab all lines for a li and call processInline on them + // once as there are some inline things that can span lines. + var li_accumulate = ""; + + // Loop over the lines in this block looking for tight lists. + tight_search: + for ( var line_no = 0; line_no < lines.length; line_no++ ) { + var nl = "", + l = lines[line_no].replace(/^\n/, function(n) { nl = n; return ""; }); + + // TODO: really should cache this + var line_re = regex_for_depth( stack.length ); + + m = l.match( line_re ); + //print( "line:", uneval(l), "\nline match:", uneval(m) ); + + // We have a list item + if ( m[1] !== undefined ) { + // Process the previous list item, if any + if ( li_accumulate.length ) { + add( last_li, loose, this.processInline( li_accumulate ), nl ); + // Loose mode will have been dealt with. Reset it + loose = false; + li_accumulate = ""; + } + + m[1] = expand_tab( m[1] ); + var wanted_depth = Math.floor(m[1].length/4)+1; + //print( "want:", wanted_depth, "stack:", stack.length); + if ( wanted_depth > stack.length ) { + // Deep enough for a nested list outright + //print ( "new nested list" ); + list = make_list( m ); + last_li.push( list ); + last_li = list[1] = [ "listitem" ]; + } + else { + // We aren't deep enough to be strictly a new level. This is + // where Md.pl goes nuts. If the indent matches a level in the + // stack, put it there, else put it one deeper then the + // wanted_depth deserves. + var found = false; + for ( i = 0; i < stack.length; i++ ) { + if ( stack[ i ].indent != m[1] ) continue; + list = stack[ i ].list; + stack.splice( i+1, stack.length - (i+1) ); + found = true; + break; + } + + if (!found) { + //print("not found. l:", uneval(l)); + wanted_depth++; + if ( wanted_depth <= stack.length ) { + stack.splice(wanted_depth, stack.length - wanted_depth); + //print("Desired depth now", wanted_depth, "stack:", stack.length); + list = stack[wanted_depth-1].list; + //print("list:", uneval(list) ); + } + else { + //print ("made new stack for messy indent"); + list = make_list(m); + last_li.push(list); + } + } + + //print( uneval(list), "last", list === stack[stack.length-1].list ); + last_li = [ "listitem" ]; + list.push(last_li); + } // end depth of shenegains + nl = ""; + } + + // Add content + if ( l.length > m[0].length ) { + li_accumulate += nl + l.substr( m[0].length ); + } + } // tight_search + + if ( li_accumulate.length ) { + add( last_li, loose, this.processInline( li_accumulate ), nl ); + // Loose mode will have been dealt with. Reset it + loose = false; + li_accumulate = ""; + } + + // Look at the next block - we might have a loose list. 
Or an extra + // paragraph for the current li + var contained = get_contained_blocks( stack.length, next ); + + // Deal with code blocks or properly nested lists + if ( contained.length > 0 ) { + // Make sure all listitems up the stack are paragraphs + forEach( stack, paragraphify, this); + + last_li.push.apply( last_li, this.toTree( contained, [] ) ); + } + + var next_block = next[0] && next[0].valueOf() || ""; + + if ( next_block.match(is_list_re) || next_block.match( /^ / ) ) { + block = next.shift(); + + // Check for an HR following a list: features/lists/hr_abutting + var hr = this.dialect.block.horizRule( block, next ); + + if ( hr ) { + ret.push.apply(ret, hr); + break; + } + + // Make sure all listitems up the stack are paragraphs + forEach( stack, paragraphify, this); + + loose = true; + continue loose_search; + } + break; + } // loose_search + + return ret; + }; + })(), + + blockquote: function blockquote( block, next ) { + if ( !block.match( /^>/m ) ) + return undefined; + + var jsonml = []; + + // separate out the leading abutting block, if any. I.e. in this case: + // + // a + // > b + // + if ( block[ 0 ] != ">" ) { + var lines = block.split( /\n/ ), + prev = [], + line_no = block.lineNumber; + + // keep shifting lines until you find a crotchet + while ( lines.length && lines[ 0 ][ 0 ] != ">" ) { + prev.push( lines.shift() ); + line_no++; + } + + var abutting = mk_block( prev.join( "\n" ), "\n", block.lineNumber ); + jsonml.push.apply( jsonml, this.processBlock( abutting, [] ) ); + // reassemble new block of just block quotes! + block = mk_block( lines.join( "\n" ), block.trailing, line_no ); + } + + + // if the next block is also a blockquote merge it in + while ( next.length && next[ 0 ][ 0 ] == ">" ) { + var b = next.shift(); + block = mk_block( block + block.trailing + b, b.trailing, block.lineNumber ); + } + + // Strip off the leading "> " and re-process as a block. + var input = block.replace( /^> ?/gm, "" ), + old_tree = this.tree, + processedBlock = this.toTree( input, [ "blockquote" ] ), + attr = extract_attr( processedBlock ); + + // If any link references were found get rid of them + if ( attr && attr.references ) { + delete attr.references; + // And then remove the attribute object if it's empty + if ( isEmpty( attr ) ) { + processedBlock.splice( 1, 1 ); + } + } + + jsonml.push( processedBlock ); + return jsonml; + }, + + referenceDefn: function referenceDefn( block, next) { + var re = /^\s*\[(.*?)\]:\s*(\S+)(?:\s+(?:(['"])(.*?)\3|\((.*?)\)))?\n?/; + // interesting matches are [ , ref_id, url, , title, title ] + + if ( !block.match(re) ) + return undefined; + + // make an attribute node if it doesn't exist + if ( !extract_attr( this.tree ) ) { + this.tree.splice( 1, 0, {} ); + } + + var attrs = extract_attr( this.tree ); + + // make a references hash if it doesn't exist + if ( attrs.references === undefined ) { + attrs.references = {}; + } + + var b = this.loop_re_over_block(re, block, function( m ) { + + if ( m[2] && m[2][0] == "<" && m[2][m[2].length-1] == ">" ) + m[2] = m[2].substring( 1, m[2].length - 1 ); + + var ref = attrs.references[ m[1].toLowerCase() ] = { + href: m[2] + }; + + if ( m[4] !== undefined ) + ref.title = m[4]; + else if ( m[5] !== undefined ) + ref.title = m[5]; + + } ); + + if ( b.length ) + next.unshift( mk_block( b, block.trailing ) ); + + return []; + }, + + para: function para( block, next ) { + // everything's a para! 
+ return [ ["para"].concat( this.processInline( block ) ) ]; + } + } +}; + +Markdown.dialects.Gruber.inline = { + + __oneElement__: function oneElement( text, patterns_or_re, previous_nodes ) { + var m, + res, + lastIndex = 0; + + patterns_or_re = patterns_or_re || this.dialect.inline.__patterns__; + var re = new RegExp( "([\\s\\S]*?)(" + (patterns_or_re.source || patterns_or_re) + ")" ); + + m = re.exec( text ); + if (!m) { + // Just boring text + return [ text.length, text ]; + } + else if ( m[1] ) { + // Some un-interesting text matched. Return that first + return [ m[1].length, m[1] ]; + } + + var res; + if ( m[2] in this.dialect.inline ) { + res = this.dialect.inline[ m[2] ].call( + this, + text.substr( m.index ), m, previous_nodes || [] ); + } + // Default for now to make dev easier. just slurp special and output it. + res = res || [ m[2].length, m[2] ]; + return res; + }, + + __call__: function inline( text, patterns ) { + + var out = [], + res; + + function add(x) { + //D:self.debug(" adding output", uneval(x)); + if ( typeof x == "string" && typeof out[out.length-1] == "string" ) + out[ out.length-1 ] += x; + else + out.push(x); + } + + while ( text.length > 0 ) { + res = this.dialect.inline.__oneElement__.call(this, text, patterns, out ); + text = text.substr( res.shift() ); + forEach(res, add ) + } + + return out; + }, + + // These characters are intersting elsewhere, so have rules for them so that + // chunks of plain text blocks don't include them + "]": function () {}, + "}": function () {}, + + __escape__ : /^\\[\\`\*_{}\[\]()#\+.!\-]/, + + "\\": function escaped( text ) { + // [ length of input processed, node/children to add... ] + // Only esacape: \ ` * _ { } [ ] ( ) # * + - . ! + if ( this.dialect.inline.__escape__.exec( text ) ) + return [ 2, text.charAt( 1 ) ]; + else + // Not an esacpe + return [ 1, "\\" ]; + }, + + "![": function image( text ) { + + // Unlike images, alt text is plain text only. no other elements are + // allowed in there + + // ![Alt text](/path/to/img.jpg "Optional title") + // 1 2 3 4 <--- captures + var m = text.match( /^!\[(.*?)\][ \t]*\([ \t]*([^")]*?)(?:[ \t]+(["'])(.*?)\3)?[ \t]*\)/ ); + + if ( m ) { + if ( m[2] && m[2][0] == "<" && m[2][m[2].length-1] == ">" ) + m[2] = m[2].substring( 1, m[2].length - 1 ); + + m[2] = this.dialect.inline.__call__.call( this, m[2], /\\/ )[0]; + + var attrs = { alt: m[1], href: m[2] || "" }; + if ( m[4] !== undefined) + attrs.title = m[4]; + + return [ m[0].length, [ "img", attrs ] ]; + } + + // ![Alt text][id] + m = text.match( /^!\[(.*?)\][ \t]*\[(.*?)\]/ ); + + if ( m ) { + // We can't check if the reference is known here as it likely wont be + // found till after. Check it in md tree->hmtl tree conversion + return [ m[0].length, [ "img_ref", { alt: m[1], ref: m[2].toLowerCase(), original: m[0] } ] ]; + } + + // Just consume the '![' + return [ 2, "![" ]; + }, + + "[": function link( text ) { + + var orig = String(text); + // Inline content is possible inside `link text` + var res = Markdown.DialectHelpers.inline_until_char.call( this, text.substr(1), "]" ); + + // No closing ']' found. Just consume the [ + if ( !res ) return [ 1, "[" ]; + + var consumed = 1 + res[ 0 ], + children = res[ 1 ], + link, + attrs; + + // At this point the first [...] has been parsed. 
See what follows to find + // out which kind of link we are (reference or direct url) + text = text.substr( consumed ); + + // [link text](/path/to/img.jpg "Optional title") + // 1 2 3 <--- captures + // This will capture up to the last paren in the block. We then pull + // back based on if there a matching ones in the url + // ([here](/url/(test)) + // The parens have to be balanced + var m = text.match( /^\s*\([ \t]*([^"']*)(?:[ \t]+(["'])(.*?)\2)?[ \t]*\)/ ); + if ( m ) { + var url = m[1]; + consumed += m[0].length; + + if ( url && url[0] == "<" && url[url.length-1] == ">" ) + url = url.substring( 1, url.length - 1 ); + + // If there is a title we don't have to worry about parens in the url + if ( !m[3] ) { + var open_parens = 1; // One open that isn't in the capture + for ( var len = 0; len < url.length; len++ ) { + switch ( url[len] ) { + case "(": + open_parens++; + break; + case ")": + if ( --open_parens == 0) { + consumed -= url.length - len; + url = url.substring(0, len); + } + break; + } + } + } + + // Process escapes only + url = this.dialect.inline.__call__.call( this, url, /\\/ )[0]; + + attrs = { href: url || "" }; + if ( m[3] !== undefined) + attrs.title = m[3]; + + link = [ "link", attrs ].concat( children ); + return [ consumed, link ]; + } + + // [Alt text][id] + // [Alt text] [id] + m = text.match( /^\s*\[(.*?)\]/ ); + + if ( m ) { + + consumed += m[ 0 ].length; + + // [links][] uses links as its reference + attrs = { ref: ( m[ 1 ] || String(children) ).toLowerCase(), original: orig.substr( 0, consumed ) }; + + link = [ "link_ref", attrs ].concat( children ); + + // We can't check if the reference is known here as it likely wont be + // found till after. Check it in md tree->hmtl tree conversion. + // Store the original so that conversion can revert if the ref isn't found. + return [ consumed, link ]; + } + + // [id] + // Only if id is plain (no formatting.) + if ( children.length == 1 && typeof children[0] == "string" ) { + + attrs = { ref: children[0].toLowerCase(), original: orig.substr( 0, consumed ) }; + link = [ "link_ref", attrs, children[0] ]; + return [ consumed, link ]; + } + + // Just consume the "[" + return [ 1, "[" ]; + }, + + + "<": function autoLink( text ) { + var m; + + if ( ( m = text.match( /^<(?:((https?|ftp|mailto):[^>]+)|(.*?@.*?\.[a-zA-Z]+))>/ ) ) != null ) { + if ( m[3] ) { + return [ m[0].length, [ "link", { href: "mailto:" + m[3] }, m[3] ] ]; + + } + else if ( m[2] == "mailto" ) { + return [ m[0].length, [ "link", { href: m[1] }, m[1].substr("mailto:".length ) ] ]; + } + else + return [ m[0].length, [ "link", { href: m[1] }, m[1] ] ]; + } + + return [ 1, "<" ]; + }, + + "`": function inlineCode( text ) { + // Inline code block. as many backticks as you like to start it + // Always skip over the opening ticks. + var m = text.match( /(`+)(([\s\S]*?)\1)/ ); + + if ( m && m[2] ) + return [ m[1].length + m[2].length, [ "inlinecode", m[3] ] ]; + else { + // TODO: No matching end code found - warn! + return [ 1, "`" ]; + } + }, + + " \n": function lineBreak( text ) { + return [ 3, [ "linebreak" ] ]; + } + +}; + +// Meta Helper/generator method for em and strong handling +function strong_em( tag, md ) { + + var state_slot = tag + "_state", + other_slot = tag == "strong" ? 
"em_state" : "strong_state"; + + function CloseTag(len) { + this.len_after = len; + this.name = "close_" + md; + } + + return function ( text, orig_match ) { + + if ( this[state_slot][0] == md ) { + // Most recent em is of this type + //D:this.debug("closing", md); + this[state_slot].shift(); + + // "Consume" everything to go back to the recrusion in the else-block below + return[ text.length, new CloseTag(text.length-md.length) ]; + } + else { + // Store a clone of the em/strong states + var other = this[other_slot].slice(), + state = this[state_slot].slice(); + + this[state_slot].unshift(md); + + //D:this.debug_indent += " "; + + // Recurse + var res = this.processInline( text.substr( md.length ) ); + //D:this.debug_indent = this.debug_indent.substr(2); + + var last = res[res.length - 1]; + + //D:this.debug("processInline from", tag + ": ", uneval( res ) ); + + var check = this[state_slot].shift(); + if ( last instanceof CloseTag ) { + res.pop(); + // We matched! Huzzah. + var consumed = text.length - last.len_after; + return [ consumed, [ tag ].concat(res) ]; + } + else { + // Restore the state of the other kind. We might have mistakenly closed it. + this[other_slot] = other; + this[state_slot] = state; + + // We can't reuse the processed result as it could have wrong parsing contexts in it. + return [ md.length, md ]; + } + } + }; // End returned function +} + +Markdown.dialects.Gruber.inline["**"] = strong_em("strong", "**"); +Markdown.dialects.Gruber.inline["__"] = strong_em("strong", "__"); +Markdown.dialects.Gruber.inline["*"] = strong_em("em", "*"); +Markdown.dialects.Gruber.inline["_"] = strong_em("em", "_"); + + +// Build default order from insertion order. +Markdown.buildBlockOrder = function(d) { + var ord = []; + for ( var i in d ) { + if ( i == "__order__" || i == "__call__" ) continue; + ord.push( i ); + } + d.__order__ = ord; +}; + +// Build patterns for inline matcher +Markdown.buildInlinePatterns = function(d) { + var patterns = []; + + for ( var i in d ) { + // __foo__ is reserved and not a pattern + if ( i.match( /^__.*__$/) ) continue; + var l = i.replace( /([\\.*+?|()\[\]{}])/g, "\\$1" ) + .replace( /\n/, "\\n" ); + patterns.push( i.length == 1 ? l : "(?:" + l + ")" ); + } + + patterns = patterns.join("|"); + d.__patterns__ = patterns; + //print("patterns:", uneval( patterns ) ); + + var fn = d.__call__; + d.__call__ = function(text, pattern) { + if ( pattern != undefined ) { + return fn.call(this, text, pattern); + } + else + { + return fn.call(this, text, patterns); + } + }; +}; + +Markdown.DialectHelpers = {}; +Markdown.DialectHelpers.inline_until_char = function( text, want ) { + var consumed = 0, + nodes = []; + + while ( true ) { + if ( text.charAt( consumed ) == want ) { + // Found the character we were looking for + consumed++; + return [ consumed, nodes ]; + } + + if ( consumed >= text.length ) { + // No closing char found. Abort. + return null; + } + + var res = this.dialect.inline.__oneElement__.call(this, text.substr( consumed ) ); + consumed += res[ 0 ]; + // Add any returned nodes. 
+ nodes.push.apply( nodes, res.slice( 1 ) ); + } +} + +// Helper function to make sub-classing a dialect easier +Markdown.subclassDialect = function( d ) { + function Block() {} + Block.prototype = d.block; + function Inline() {} + Inline.prototype = d.inline; + + return { block: new Block(), inline: new Inline() }; +}; + +Markdown.buildBlockOrder ( Markdown.dialects.Gruber.block ); +Markdown.buildInlinePatterns( Markdown.dialects.Gruber.inline ); + +Markdown.dialects.Maruku = Markdown.subclassDialect( Markdown.dialects.Gruber ); + +Markdown.dialects.Maruku.processMetaHash = function processMetaHash( meta_string ) { + var meta = split_meta_hash( meta_string ), + attr = {}; + + for ( var i = 0; i < meta.length; ++i ) { + // id: #foo + if ( /^#/.test( meta[ i ] ) ) { + attr.id = meta[ i ].substring( 1 ); + } + // class: .foo + else if ( /^\./.test( meta[ i ] ) ) { + // if class already exists, append the new one + if ( attr["class"] ) { + attr["class"] = attr["class"] + meta[ i ].replace( /./, " " ); + } + else { + attr["class"] = meta[ i ].substring( 1 ); + } + } + // attribute: foo=bar + else if ( /\=/.test( meta[ i ] ) ) { + var s = meta[ i ].split( /\=/ ); + attr[ s[ 0 ] ] = s[ 1 ]; + } + } + + return attr; +} + +function split_meta_hash( meta_string ) { + var meta = meta_string.split( "" ), + parts = [ "" ], + in_quotes = false; + + while ( meta.length ) { + var letter = meta.shift(); + switch ( letter ) { + case " " : + // if we're in a quoted section, keep it + if ( in_quotes ) { + parts[ parts.length - 1 ] += letter; + } + // otherwise make a new part + else { + parts.push( "" ); + } + break; + case "'" : + case '"' : + // reverse the quotes and move straight on + in_quotes = !in_quotes; + break; + case "\\" : + // shift off the next letter to be used straight away. + // it was escaped so we'll keep it whatever it is + letter = meta.shift(); + default : + parts[ parts.length - 1 ] += letter; + break; + } + } + + return parts; +} + +Markdown.dialects.Maruku.block.document_meta = function document_meta( block, next ) { + // we're only interested in the first block + if ( block.lineNumber > 1 ) return undefined; + + // document_meta blocks consist of one or more lines of `Key: Value\n` + if ( ! block.match( /^(?:\w+:.*\n)*\w+:.*$/ ) ) return undefined; + + // make an attribute node if it doesn't exist + if ( !extract_attr( this.tree ) ) { + this.tree.splice( 1, 0, {} ); + } + + var pairs = block.split( /\n/ ); + for ( p in pairs ) { + var m = pairs[ p ].match( /(\w+):\s*(.*)$/ ), + key = m[ 1 ].toLowerCase(), + value = m[ 2 ]; + + this.tree[ 1 ][ key ] = value; + } + + // document_meta produces no content! 
+ return []; +}; + +Markdown.dialects.Maruku.block.block_meta = function block_meta( block, next ) { + // check if the last line of the block is an meta hash + var m = block.match( /(^|\n) {0,3}\{:\s*((?:\\\}|[^\}])*)\s*\}$/ ); + if ( !m ) return undefined; + + // process the meta hash + var attr = this.dialect.processMetaHash( m[ 2 ] ); + + var hash; + + // if we matched ^ then we need to apply meta to the previous block + if ( m[ 1 ] === "" ) { + var node = this.tree[ this.tree.length - 1 ]; + hash = extract_attr( node ); + + // if the node is a string (rather than JsonML), bail + if ( typeof node === "string" ) return undefined; + + // create the attribute hash if it doesn't exist + if ( !hash ) { + hash = {}; + node.splice( 1, 0, hash ); + } + + // add the attributes in + for ( a in attr ) { + hash[ a ] = attr[ a ]; + } + + // return nothing so the meta hash is removed + return []; + } + + // pull the meta hash off the block and process what's left + var b = block.replace( /\n.*$/, "" ), + result = this.processBlock( b, [] ); + + // get or make the attributes hash + hash = extract_attr( result[ 0 ] ); + if ( !hash ) { + hash = {}; + result[ 0 ].splice( 1, 0, hash ); + } + + // attach the attributes to the block + for ( a in attr ) { + hash[ a ] = attr[ a ]; + } + + return result; +}; + +Markdown.dialects.Maruku.block.definition_list = function definition_list( block, next ) { + // one or more terms followed by one or more definitions, in a single block + var tight = /^((?:[^\s:].*\n)+):\s+([\s\S]+)$/, + list = [ "dl" ], + i, m; + + // see if we're dealing with a tight or loose block + if ( ( m = block.match( tight ) ) ) { + // pull subsequent tight DL blocks out of `next` + var blocks = [ block ]; + while ( next.length && tight.exec( next[ 0 ] ) ) { + blocks.push( next.shift() ); + } + + for ( var b = 0; b < blocks.length; ++b ) { + var m = blocks[ b ].match( tight ), + terms = m[ 1 ].replace( /\n$/, "" ).split( /\n/ ), + defns = m[ 2 ].split( /\n:\s+/ ); + + // print( uneval( m ) ); + + for ( i = 0; i < terms.length; ++i ) { + list.push( [ "dt", terms[ i ] ] ); + } + + for ( i = 0; i < defns.length; ++i ) { + // run inline processing over the definition + list.push( [ "dd" ].concat( this.processInline( defns[ i ].replace( /(\n)\s+/, "$1" ) ) ) ); + } + } + } + else { + return undefined; + } + + return [ list ]; +}; + +// splits on unescaped instances of @ch. If @ch is not a character the result +// can be unpredictable + +Markdown.dialects.Maruku.block.table = function table (block, next) { + + var _split_on_unescaped = function(s, ch) { + ch = ch || '\\s'; + if (ch.match(/^[\\|\[\]{}?*.+^$]$/)) { ch = '\\' + ch; } + var res = [ ], + r = new RegExp('^((?:\\\\.|[^\\\\' + ch + '])*)' + ch + '(.*)'), + m; + while(m = s.match(r)) { + res.push(m[1]); + s = m[2]; + } + res.push(s); + return res; + } + + var leading_pipe = /^ {0,3}\|(.+)\n {0,3}\|\s*([\-:]+[\-| :]*)\n((?:\s*\|.*(?:\n|$))*)(?=\n|$)/, + // find at least an unescaped pipe in each line + no_leading_pipe = /^ {0,3}(\S(?:\\.|[^\\|])*\|.*)\n {0,3}([\-:]+\s*\|[\-| :]*)\n((?:(?:\\.|[^\\|])*\|.*(?:\n|$))*)(?=\n|$)/, + i, m; + if (m = block.match(leading_pipe)) { + // remove leading pipes in contents + // (header and horizontal rule already have the leading pipe left out) + m[3] = m[3].replace(/^\s*\|/gm, ''); + } else if (! 
( m = block.match(no_leading_pipe))) { + return undefined; + } + + var table = [ "table", [ "thead", [ "tr" ] ], [ "tbody" ] ]; + + // remove trailing pipes, then split on pipes + // (no escaped pipes are allowed in horizontal rule) + m[2] = m[2].replace(/\|\s*$/, '').split('|'); + + // process alignment + var html_attrs = [ ]; + forEach (m[2], function (s) { + if (s.match(/^\s*-+:\s*$/)) html_attrs.push({align: "right"}); + else if (s.match(/^\s*:-+\s*$/)) html_attrs.push({align: "left"}); + else if (s.match(/^\s*:-+:\s*$/)) html_attrs.push({align: "center"}); + else html_attrs.push({}); + }); + + // now for the header, avoid escaped pipes + m[1] = _split_on_unescaped(m[1].replace(/\|\s*$/, ''), '|'); + for (i = 0; i < m[1].length; i++) { + table[1][1].push(['th', html_attrs[i] || {}].concat( + this.processInline(m[1][i].trim()))); + } + + // now for body contents + forEach (m[3].replace(/\|\s*$/mg, '').split('\n'), function (row) { + var html_row = ['tr']; + row = _split_on_unescaped(row, '|'); + for (i = 0; i < row.length; i++) { + html_row.push(['td', html_attrs[i] || {}].concat(this.processInline(row[i].trim()))); + } + table[2].push(html_row); + }, this); + + return [table]; +} + +Markdown.dialects.Maruku.inline[ "{:" ] = function inline_meta( text, matches, out ) { + if ( !out.length ) { + return [ 2, "{:" ]; + } + + // get the preceeding element + var before = out[ out.length - 1 ]; + + if ( typeof before === "string" ) { + return [ 2, "{:" ]; + } + + // match a meta hash + var m = text.match( /^\{:\s*((?:\\\}|[^\}])*)\s*\}/ ); + + // no match, false alarm + if ( !m ) { + return [ 2, "{:" ]; + } + + // attach the attributes to the preceeding element + var meta = this.dialect.processMetaHash( m[ 1 ] ), + attr = extract_attr( before ); + + if ( !attr ) { + attr = {}; + before.splice( 1, 0, attr ); + } + + for ( var k in meta ) { + attr[ k ] = meta[ k ]; + } + + // cut out the string and replace it with nothing + return [ m[ 0 ].length, "" ]; +}; + +Markdown.dialects.Maruku.inline.__escape__ = /^\\[\\`\*_{}\[\]()#\+.!\-|:]/; + +Markdown.buildBlockOrder ( Markdown.dialects.Maruku.block ); +Markdown.buildInlinePatterns( Markdown.dialects.Maruku.inline ); + +var isArray = Array.isArray || function(obj) { + return Object.prototype.toString.call(obj) == "[object Array]"; +}; + +var forEach; +// Don't mess with Array.prototype. Its not friendly +if ( Array.prototype.forEach ) { + forEach = function( arr, cb, thisp ) { + return arr.forEach( cb, thisp ); + }; +} +else { + forEach = function(arr, cb, thisp) { + for (var i = 0; i < arr.length; i++) { + cb.call(thisp || arr, arr[i], i, arr); + } + } +} + +var isEmpty = function( obj ) { + for ( var key in obj ) { + if ( hasOwnProperty.call( obj, key ) ) { + return false; + } + } + + return true; +} + +function extract_attr( jsonml ) { + return isArray(jsonml) + && jsonml.length > 1 + && typeof jsonml[ 1 ] === "object" + && !( isArray(jsonml[ 1 ]) ) + ? jsonml[ 1 ] + : undefined; +} + + + +/** + * renderJsonML( jsonml[, options] ) -> String + * - jsonml (Array): JsonML array to render to XML + * - options (Object): options + * + * Converts the given JsonML into well-formed XML. + * + * The options currently understood are: + * + * - root (Boolean): wether or not the root node should be included in the + * output, or just its children. The default `false` is to not include the + * root itself. + */ +expose.renderJsonML = function( jsonml, options ) { + options = options || {}; + // include the root element in the rendered output? 
+ options.root = options.root || false; + + var content = []; + + if ( options.root ) { + content.push( render_tree( jsonml ) ); + } + else { + jsonml.shift(); // get rid of the tag + if ( jsonml.length && typeof jsonml[ 0 ] === "object" && !( jsonml[ 0 ] instanceof Array ) ) { + jsonml.shift(); // get rid of the attributes + } + + while ( jsonml.length ) { + content.push( render_tree( jsonml.shift() ) ); + } + } + + return content.join( "\n\n" ); +}; + +function escapeHTML( text ) { + return text.replace( /&/g, "&amp;" ) + .replace( /</g, "&lt;" ) + .replace( />/g, "&gt;" ) + .replace( /"/g, "&quot;" ) + .replace( /'/g, "&#39;" ); +} + +function render_tree( jsonml ) { + // basic case + if ( typeof jsonml === "string" ) { + return escapeHTML( jsonml ); + } + + var tag = jsonml.shift(), + attributes = {}, + content = []; + + if ( jsonml.length && typeof jsonml[ 0 ] === "object" && !( jsonml[ 0 ] instanceof Array ) ) { + attributes = jsonml.shift(); + } + + while ( jsonml.length ) { + content.push( render_tree( jsonml.shift() ) ); + } + + var tag_attrs = ""; + for ( var a in attributes ) { + tag_attrs += " " + a + '="' + escapeHTML( attributes[ a ] ) + '"'; + } + + // be careful about adding whitespace here for inline elements + if ( tag == "img" || tag == "br" || tag == "hr" ) { + return "<"+ tag + tag_attrs + "/>"; + } + else { + return "<"+ tag + tag_attrs + ">" + content.join( "" ) + "</" + tag + ">"; + } +} + +function convert_tree_to_html( tree, references, options ) { + var i; + options = options || {}; + + // shallow clone + var jsonml = tree.slice( 0 ); + + if ( typeof options.preprocessTreeNode === "function" ) { + jsonml = options.preprocessTreeNode(jsonml, references); + } + + // Clone attributes if they exist + var attrs = extract_attr( jsonml ); + if ( attrs ) { + jsonml[ 1 ] = {}; + for ( i in attrs ) { + jsonml[ 1 ][ i ] = attrs[ i ]; + } + attrs = jsonml[ 1 ]; + } + + // basic case + if ( typeof jsonml === "string" ) { + return jsonml; + } + + // convert this node + switch ( jsonml[ 0 ] ) { + case "header": + jsonml[ 0 ] = "h" + jsonml[ 1 ].level; + delete jsonml[ 1 ].level; + break; + case "bulletlist": + jsonml[ 0 ] = "ul"; + break; + case "numberlist": + jsonml[ 0 ] = "ol"; + break; + case "listitem": + jsonml[ 0 ] = "li"; + break; + case "para": + jsonml[ 0 ] = "p"; + break; + case "markdown": + jsonml[ 0 ] = "html"; + if ( attrs ) delete attrs.references; + break; + case "code_block": + jsonml[ 0 ] = "pre"; + i = attrs ?
2 : 1; + var code = [ "code" ]; + code.push.apply( code, jsonml.splice( i, jsonml.length - i ) ); + jsonml[ i ] = code; + break; + case "inlinecode": + jsonml[ 0 ] = "code"; + break; + case "img": + jsonml[ 1 ].src = jsonml[ 1 ].href; + delete jsonml[ 1 ].href; + break; + case "linebreak": + jsonml[ 0 ] = "br"; + break; + case "link": + jsonml[ 0 ] = "a"; + break; + case "link_ref": + jsonml[ 0 ] = "a"; + + // grab this ref and clean up the attribute node + var ref = references[ attrs.ref ]; + + // if the reference exists, make the link + if ( ref ) { + delete attrs.ref; + + // add in the href and title, if present + attrs.href = ref.href; + if ( ref.title ) { + attrs.title = ref.title; + } + + // get rid of the unneeded original text + delete attrs.original; + } + // the reference doesn't exist, so revert to plain text + else { + return attrs.original; + } + break; + case "img_ref": + jsonml[ 0 ] = "img"; + + // grab this ref and clean up the attribute node + var ref = references[ attrs.ref ]; + + // if the reference exists, make the link + if ( ref ) { + delete attrs.ref; + + // add in the href and title, if present + attrs.src = ref.href; + if ( ref.title ) { + attrs.title = ref.title; + } + + // get rid of the unneeded original text + delete attrs.original; + } + // the reference doesn't exist, so revert to plain text + else { + return attrs.original; + } + break; + } + + // convert all the children + i = 1; + + // deal with the attribute node, if it exists + if ( attrs ) { + // if there are keys, skip over it + for ( var key in jsonml[ 1 ] ) { + i = 2; + break; + } + // if there aren't, remove it + if ( i === 1 ) { + jsonml.splice( i, 1 ); + } + } + + for ( ; i < jsonml.length; ++i ) { + jsonml[ i ] = convert_tree_to_html( jsonml[ i ], references, options ); + } + + return jsonml; +} + + +// merges adjacent text nodes into a single node +function merge_text_nodes( jsonml ) { + // skip the tag name and attribute hash + var i = extract_attr( jsonml ) ? 
2 : 1; + + while ( i < jsonml.length ) { + // if it's a string check the next item too + if ( typeof jsonml[ i ] === "string" ) { + if ( i + 1 < jsonml.length && typeof jsonml[ i + 1 ] === "string" ) { + // merge the second string into the first and remove it + jsonml[ i ] += jsonml.splice( i + 1, 1 )[ 0 ]; + } + else { + ++i; + } + } + // if it's not a string recurse + else { + merge_text_nodes( jsonml[ i ] ); + ++i; + } + } +} + +} )( (function() { + if ( typeof exports === "undefined" ) { + window.markdown = {}; + return window.markdown; + } + else { + return exports; + } +} )() ); diff --git a/node_modules/markdown/markdown-js.sublime-project b/node_modules/markdown/markdown-js.sublime-project new file mode 100644 index 0000000..8eaf225 --- /dev/null +++ b/node_modules/markdown/markdown-js.sublime-project @@ -0,0 +1,10 @@ +{ + "folders": + [ + { + "path": "/Users/ash/code/js/markdown-js", + "folder_exclude_patterns": ["node_modules"], + "file_exclude_patterns": ["*.sublime-*"] + } + ] +} diff --git a/node_modules/markdown/markdown-js.sublime-workspace b/node_modules/markdown/markdown-js.sublime-workspace new file mode 100644 index 0000000..6b12471 --- /dev/null +++ b/node_modules/markdown/markdown-js.sublime-workspace @@ -0,0 +1,1993 @@ +{ + "auto_complete": + { + "selected_items": + [ + [ + "di", + "dialects" + ], + [ + "len", + "length-1" + ], + [ + "parsed", + "parsed_nodes" + ], + [ + "pars", + "parsed_ndoes" + ], + [ + "prev", + "previous" + ], + [ + "con", + "console" + ], + [ + "d", + "dialects" + ], + [ + "pat", + "patterns" + ], + [ + "br", + "break" + ], + [ + "inl", + "Inline" + ], + [ + "inlin", + "inline_until_char" + ], + [ + "Di", + "DialectHelpers" + ], + [ + "sub", + "substr" + ], + [ + "last", + "lastIndex" + ], + [ + "open", + "open_brackets" + ], + [ + "bra", + "bracket" + ], + [ + "ope", + "open_brackets" + ], + [ + "prot", + "prototype" + ], + [ + "pa", + "paragraphify" + ], + [ + "debug", + "debugger" + ], + [ + "test", + "testBasePath" + ], + [ + "read", + "readFileSync" + ], + [ + "mk", + "mk_block_toSource" + ], + [ + "u", + "util" + ], + [ + "return", + "return_to" + ], + [ + "retur", + "return_to" + ], + [ + "base64_c", + "base64_decode" + ], + [ + "var", + "variant" + ], + [ + "Produ", + "ProductX" + ], + [ + "publ", + "publishing_tasks" + ], + [ + "use", + "use_hosted_download" + ], + [ + "publi", + "publishing_tasks" + ], + [ + "Var", + "variant_price" + ], + [ + "pr", + "product2" + ], + [ + "pro", + "product1" + ], + [ + "proc", + "product1" + ], + [ + "paypal", + "paypal_account" + ], + [ + "pay", + "paypal_account" + ], + [ + "bala", + "balance_permissions_granted" + ], + [ + "curr", + "current_user" + ], + [ + "masspay", + "masspay_check" + ], + [ + "redirec", + "redirect_to" + ], + [ + "mass", + "masspay_check_users_path" + ], + [ + "cur", + "current_user" + ], + [ + "per", + "perform_masspay_check" + ], + [ + "eq", + "equity_partner_share" + ], + [ + "equi", + "equity_partner_share" + ], + [ + "brok", + "broker" + ], + [ + "broker", + "broker_share" + ], + [ + "affi", + "affiliate_share" + ], + [ + "net", + "network_share" + ], + [ + "cal", + "caclulate_shares" + ], + [ + "aff", + "affiliate" + ], + [ + "to", + "to_split" + ], + [ + "va", + "variant" + ], + [ + "with", + "with_aff_prog" + ], + [ + "purge", + "purge_cache" + ], + [ + "Ipan", + "IpanUpdater" + ], + [ + "send", + "send_user" + ], + [ + "after_c", + "after_commit" + ], + [ + "show", + "shown_in_marketplace" + ], + [ + "C", + "CATEGORIES" + ], + [ + "after", + "after_commit" + ], + [ + 
"res", + "resque_pid" + ], + [ + "cus", + "custom_params" + ], + [ + "refund", + "refunded" + ], + [ + "assert", + "assert_equal" + ], + [ + "p", + "payment_plans" + ], + [ + "up", + "upsell_without_price" + ], + [ + "list", + "listing" + ], + [ + "su", + "subscriptions" + ], + [ + "padding", + "padding padding-top: length" + ], + [ + "opt", + "optin_condition" + ], + [ + "sale", + "sale_time_condition" + ], + [ + "op", + "optin_condition" + ], + [ + "click", + "click_count_condition" + ], + [ + "sal", + "sale_time_condition" + ], + [ + "by", + "by_listing" + ], + [ + "optin", + "optin_tracking" + ], + [ + "product", + "product_b" + ], + [ + "prod", + "product_a" + ], + [ + "par", + "params" + ], + [ + "sho", + "show_in_marketplace_state" + ], + [ + "lis", + "listing_state" + ], + [ + "dow", + "download" + ], + [ + "valid", + "validate_state_on_published" + ], + [ + "debu", + "debug Break Point" + ], + [ + "upse", + "upsell_id" + ], + [ + "upsell", + "upsell_parent" + ], + [ + "create", + "created_at" + ], + [ + "creat", + "created_at" + ], + [ + "data", + "database" + ], + [ + "render", + "render_template" + ], + [ + "fak", + "fake_p_id_1" + ], + [ + "fake", + "fake_p_id_1" + ], + [ + "update_", + "update_attribute" + ], + [ + "default", + "default_variant" + ], + [ + "frontend", + "frontend_product" + ], + [ + "front", + "frontend_product" + ], + [ + "updat", + "update_ipan" + ], + [ + "deb", + "debugger" + ], + [ + "ass", + "association" + ], + [ + "listing", + "listing_id" + ], + [ + "re", + "release_date" + ], + [ + "ca", + "category" + ], + [ + "uni", + "units_sold" + ], + [ + "am", + "amount" + ], + [ + "co", + "commission" + ], + [ + "Per", + "Percentage" + ], + [ + "variant", + "variants" + ], + [ + "varia", + "variant" + ], + [ + "tra", + "transaction" + ], + [ + "au", + "autoresponder_type" + ], + [ + "aut", + "autoresponder" + ], + [ + "auto", + "autoresponder_type" + ], + [ + "v", + "variants" + ], + [ + "autop", + "autoresponder_id" + ], + [ + "rel", + "release_date" + ], + [ + "upd", + "update_stats" + ], + [ + "ve", + "vendor_earnings" + ], + [ + "ret", + "return_hash" + ], + [ + "vendor", + "vendor_data" + ], + [ + "affili", + "affiliate_data" + ], + [ + "affil", + "affiliate_data" + ], + [ + "trac", + "tracking_codes" + ], + [ + "full", + "full_product_name" + ], + [ + "af", + "after_create" + ], + [ + "shoul", + "should" + ] + ] + }, + "buffers": + [ + { + "file": "lib/markdown.js", + "settings": + { + "buffer_size": 50548, + "line_ending": "Unix" + } + }, + { + "file": "test/features/github/line_breaks.json", + "settings": + { + "buffer_size": 118, + "line_ending": "Unix" + } + }, + { + "file": "test/features/github/no_em_in_word.text", + "settings": + { + "buffer_size": 91, + "line_ending": "Unix" + } + }, + { + "file": "test/features/github/no_em_in_word.json", + "settings": + { + "buffer_size": 183, + "line_ending": "Unix" + } + }, + { + "file": "test/features/github/auto_linking.text", + "settings": + { + "buffer_size": 54, + "line_ending": "Unix" + } + }, + { + "file": "test/features.t.js", + "settings": + { + "buffer_size": 2359, + "line_ending": "Unix" + } + }, + { + "file": "bin/md2html.js", + "settings": + { + "buffer_size": 1064, + "line_ending": "Unix" + } + }, + { + "file": "test/features/emphasis/simple.text", + "settings": + { + "buffer_size": 74, + "line_ending": "Unix" + } + }, + { + "file": "Changes.markdown", + "settings": + { + "buffer_size": 646, + "line_ending": "Unix" + } + }, + { + "file": "package.json", + "settings": + { + "buffer_size": 1338, + 
"line_ending": "Unix" + } + } + ], + "build_system": "Packages/Ruby/Ruby.sublime-build", + "command_palette": + { + "height": 47.0, + "selected_items": + [ + [ + "newvie", + "File: New View into File" + ], + [ + "pack", + "Package Control: Remove Package" + ], + [ + "view", + "File: New View into File" + ], + [ + "giste", + "GitHub: Open Gist in Editor" + ], + [ + "gist", + "GitHub: Private Gist from Selection" + ], + [ + "ruby", + "Set Syntax: Ruby" + ], + [ + "html", + "Set Syntax: HTML (Rails)" + ], + [ + "syntax", + "Set Syntax: C#" + ], + [ + "php", + "Set Syntax: PHP" + ], + [ + ":w", + ":w - Save" + ], + [ + "trim", + "Snippet: Lorem ipsum" + ], + [ + "py", + "Set Syntax: Python" + ], + [ + "bash", + "Set Syntax: Shell Script (Bash)" + ], + [ + "text", + "Set Syntax: Textile" + ], + [ + "mark", + "Set Syntax: Markdown" + ], + [ + "c#", + "Set Syntax: C#" + ], + [ + "yaml", + "Set Syntax: YAML" + ], + [ + "perl", + "Set Syntax: Perl" + ], + [ + "pl", + "Set Syntax: Plain Text" + ], + [ + "", + "About" + ] + ], + "width": 467.0 + }, + "console": + { + "height": 129.0 + }, + "distraction_free": + { + "menu_visible": true, + "show_minimap": false, + "show_open_files": true, + "show_tabs": false, + "side_bar_visible": false, + "status_bar_visible": false + }, + "file_history": + [ + "/Users/ash/code/js/markdown-js/bin/namp.js", + "/Users/ash/code/js/markdown-js/test/features.t.js", + "/Users/ash/code/js/markdown-js/test/features/github/auto_linking.json", + "/Users/ash/code/js/markdown-js/test/features/meta/leading_whitespace.json", + "/Users/ash/code/js/markdown-js/package.json", + "/Users/ash/code/js/markdown-js/test/features/github/fenced_code_with_lang.text", + "/Users/ash/code/js/markdown-js/test/features/github/fenced_code_with_lang.json", + "/Users/ash/code/js/markdown-js/test/features/github/fenced_code.json", + "/Users/ash/code/js/markdown-js/test/features/code_fences/fenced.text", + "/Users/ash/code/js/markdown-js/test/features/code_fences/fenced.json", + "/Users/ash/code/digiresults/ops/chef/cookbooks/digiresults/templates/default/proxies/manager.erb", + "/Users/ash/code/js/markdown-js/smpl", + "/Users/ash/code/js/markdown-js/test/features/meta/multiple_classes.text", + "/Users/ash/code/js/markdown-js/test/features/meta/multiple_classes.json", + "/Users/ash/code/js/markdown-js/test/regressions.t.js", + "/Users/ash/code/js/markdown-js/Changes.markdown", + "/Users/ash/code/js/markdown-js/test/features/links/implicit.text", + "/Users/ash/code/js/markdown-js/test/features/links/ref_with_image_ref.text", + "/Users/ash/code/js/markdown-js/lib/markdown.js", + "/Users/ash/code/js/markdown-js/test/features/links/ref_with_image_ref.json", + "/Users/ash/code/js/markdown-js/test/interface.t.js", + "/Users/ash/etc/sublime/Packages/User/Distraction Free.sublime-settings", + "/Users/ash/Library/Application Support/Sublime Text 2/Packages/User/Preferences.sublime-settings", + "/Users/ash/code/digiresults/ops/chef/cookbooks/rvm/attributes/default.rb", + "/Users/ash/code/digiresults/ops/chef/cookbooks/rvm/recipes/default.rb", + "/Users/ash/code/js/markdown-js/test/features/links/parens_inline.text", + "/Users/ash/code/js/markdown-js/test/features/links/in_brackets.json", + "/Users/ash/code/js/markdown-js/test/features/links/in_brackets.text", + "/Users/ash/code/js/markdown-js/test/features/images/ref.text", + "/Users/ash/code/js/markdown-js/test/features/images/ref.json", + "/Users/ash/code/js/markdown-js/test/features/meta/list.text", + 
"/Users/ash/code/js/markdown-js/test/features/meta/list.json", + "/Users/ash/code/js/markdown-js/test/features/meta/class.text", + "/Users/ash/code/js/markdown-js/bin/md2html.js", + "/Users/ash/code/js/markdown-js/README.markdown", + "/Users/ash/code/js/markdown-js/markdown-js.sublime-project", + "/Users/ash/code/js/markdown-js/test/reg_old.js", + "/Users/ash/code/js/markdown-js/t.js", + "/Users/ash/code/js/markdown-js/node_modules/tap/node_modules/deep-equal/index.js", + "/Users/ash/code/js/markdown-js/node_modules/tap/lib/tap-assert.js", + "/Users/ash/code/js/untitled", + "/Users/ash/etc/bash/rc/01.rvm", + "/Users/ash/etc/bash/rc/01.node", + "/Users/ash/etc/rbenv/rbenv.d/rehash/rbx-2.0.0-dev-fix.bash", + "/Users/ash/code/digiresults/manager/Gemfile", + "/Users/ash/etc/rbenv/shims/rake", + "/Users/ash/code/digiresults/manager/test/functional/reports_controller_test.rb", + "/Users/ash/Dropbox/dta_support_snippets.txt", + "/Users/ash/code/digiresults/manager/app/views/manage/_checklist.html.erb", + "/Users/ash/code/digiresults/manager/app/views/manage/listings/publishing/_not_ready.html.erb", + "/Users/ash/code/digiresults/manager/test/unit/user_test.rb", + "/Users/ash/code/digiresults/manager/test/unit/sale_test.rb", + "/Users/ash/code/digiresults/manager/app/models/sale.rb", + "/Users/ash/code/digiresults/manager/app/controllers/manage/variants_controller.rb", + "/Users/ash/code/digiresults/manager/app/views/manage/products/publishing/_not_ready.html.erb", + "/Users/ash/code/digiresults/manager/lib/ipan_updater.rb", + "/Users/ash/code/digiresults/manager/app/models/listing.rb", + "/Users/ash/code/digiresults/manager/app/models/product.rb", + "/Users/ash/code/digiresults/manager/test/unit/product_test.rb", + "/Users/ash/code/digiresults/manager/app/controllers/manage/products_controller.rb", + "/Users/ash/code/digiresults/manager/app/views/manage/products/edit.html.erb", + "/Users/ash/code/digiresults/manager/app/views/manage/listings/edit.html.erb", + "/Users/ash/code/digiresults/manager/app/views/manage/variants/edit.html.erb", + "/Users/ash/code/digiresults/manager/public/javascripts/application.js", + "/Users/ash/code/digiresults/manager/db/migrate/20120402083309_add_share_columns_to_payment_plans.rb", + "/Users/ash/code/digiresults/manager/app/models/variant.rb", + "/Users/ash/code/digiresults/ops/chef/data_bags/apps/ipan.json", + "/Users/ash/code/digiresults/ops/chef/data_bags/apps/manager.json", + "/Users/ash/Sites/cb_test.php", + "/Users/ash/Sites/decode.php", + "/Users/ash/Downloads/Functions.php", + "/Users/ash/code/digiresults/manager/app/views/manage/products/publishing/_ready.html.erb", + "/Users/ash/code/digiresults/manager/config/locales/en.yml", + "/Users/ash/code/digiresults/manager/app/controllers/manage/listings_controller.rb", + "/Users/ash/code/digiresults/manager/app/helpers/application_helper.rb", + "/Users/ash/code/digiresults/manager/test/unit/ipan_updater_test.rb", + "/Users/ash/code/digiresults/manager/db/migrate/20120329211108_add_fee_discount_to_users.rb", + "/Users/ash/code/digiresults/manager/app/models/user.rb", + "/Users/ash/code/digiresults/manager/db/migrate/20120320103301_move_products_into_upsell_chains.rb", + "/Users/ash/code/digiresults/manager/app/models/discount_code.rb", + "/Users/ash/code/digiresults/manager/lib/price_attributes.rb", + "/Users/ash/code/digiresults/manager/test/unit/variant_test.rb", + "/Users/ash/code/digiresults/manager/test/unit/discount_code_test.rb", + 
"/Users/ash/code/digiresults/manager/test/functional/manage/discount_codes_controller_test.rb", + "/Users/ash/code/digiresults/manager/app/views/manage/listings/show.html.erb", + "/Users/ash/code/digiresults/manager/app/views/listings/_product.html.erb", + "/Users/ash/code/digiresults/manager/app/views/user_mailer/suspended_refund_failed.text.erb", + "/Users/ash/code/digiresults/manager/app/controllers/users_controller.rb", + "/Users/ash/code/digiresults/manager/test/functional/users_controller_test.rb", + "/Users/ash/Library/Application Support/Sublime Text 2/Packages/Default/Preferences.sublime-settings", + "/Users/ash/code/digiresults/manager/lib/pay_pal.rb", + "/Users/ash/code/digiresults/manager/app/views/users/masspay_check.html.erb", + "/Users/ash/code/digiresults/manager/config/routes.rb", + "/Users/ash/code/digiresults/manager/test/unit/paypal_test.rb", + "/Users/ash/code/digiresults/manager/public/stylesheets/bootstrap.min.css", + "/Users/ash/code/digiresults/manager/app/controllers/sessions_controller.rb", + "/Users/ash/code/digiresults/manager/app/views/users/subscription_permissions.html.erb", + "/Users/ash/code/digiresults/manager/test/factories/payment_plan.rb", + "/Users/ash/code/digiresults/manager/test/functional/api/ipan_v1_payment_plans_controller_test.rb", + "/Users/ash/code/digiresults/manager/app/models/payment_plan.rb", + "/Users/ash/code/digiresults/ipan/lib/prices.rb", + "/Users/ash/.gitconfig", + "/Users/ash/code/digiresults/manager/app/controllers/users/affiliate_programs_controller.rb", + "/Users/ash/.rvm/gems/ruby-1.9.2-p180/gems/activerecord-3.0.4/lib/active_record/transactions.rb", + "/Users/ash/code/digiresults/manager/app/controllers/manage/discount_codes_controller.rb", + "/Users/ash/code/digiresults/manager/db/migrate/20120329211108_add_fee_discount_to_variants.rb", + "/Users/ash/code/digiresults/manager/app/views/manage/variants/_price.html.erb", + "/Users/ash/code/digiresults/manager/Rakefile", + "/Users/ash/code/digiresults/manager/test/factories/discount_code.rb", + "/Users/ash/code/digiresults/manager/tmp/cache/ipan:products/0/29.cache", + "/Users/ash/code/digiresults/manager/config/environments/test.rb", + "/Users/ash/code/digiresults/manager/app/controllers/api/ipan/v2/products_controller.rb", + "/Users/ash/code/digiresults/manager/app/controllers/api/ipan/v2/users_controller.rb", + "/Users/ash/code/digiresults/manager/app/controllers/api/ipan/v1/users_controller.rb", + "/Users/ash/code/digiresults/manager/app/controllers/api/ipan/v1/products_controller.rb", + "/Users/ash/code/digiresults/manager/test/functional/api/ipan_v2_users_controller_test.rb", + "/Users/ash/code/digiresults/manager/test/functional/api/ipan_v2_products_controller_test.rb", + "/Users/ash/code/digiresults/manager/config/database.yml", + "/Users/ash/code/digiresults/ops/chef/cookbooks/application/recipes/resque-worker.rb", + "/Users/ash/code/digiresults/manager/app/controllers/application_controller.rb", + "/Users/ash/code/digiresults/manager/test/functional/api/ipan_v1_users_controller_test.rb", + "/Users/ash/code/digiresults/manager/test/functional/api/ipan_v1_products_controller_test.rb", + "/Users/ash/.rvm/gems/ruby-1.9.2-p180/gems/activesupport-3.0.4/lib/active_support/cache.rb", + "/Users/ash/.rvm/gems/ruby-1.9.2-p180/gems/activesupport-3.0.4/lib/active_support/cache/file_store.rb", + "/Users/ash/code/digiresults/manager/config/environment.rb", + "/Users/ash/.rvm/gems/ruby-1.9.2-p180/gems/activesupport-3.0.4/lib/active_support/cache/strategy/local_cache.rb", + 
"/Users/ash/.rvm/gems/ruby-1.9.2-p180/gems/railties-3.0.4/lib/rails.rb", + "/Users/ash/code/digiresults/manager/test/test_helper.rb" + ], + "find": + { + "height": 33.0 + }, + "find_in_files": + { + "height": 0.0, + "where_history": + [ + "" + ] + }, + "find_state": + { + "case_sensitive": false, + "find_history": + [ + "debugger", + "parsed_ndoes", + "Markdown.dialects.Gruber", + "strong_em", + "count_l", + "merge_text_nodes", + "buildin", + "i", + "some", + "process_meta_hash", + "split_meta_hash", + "process_met", + "debugger", + "/manage", + "Maruku", + "debugger", + "original", + "attr", + "conact", + "console.log", + "debugger", + "concat", + "link", + "children", + "out", + "patterns_or_re", + "inline", + "m", + "patterns_or_re", + ",", + "escape", + "))))", + "open_brackets", + "m[2]", + "attrs = { href", + "text", + "img_ref", + "link_ref", + "img_ref", + "img", + "img_ref", + "img_reg", + "ref", + "reg", + "\"\\\\\"", + "__call__", + "link", + "forEach", + ".length", + "paragraphify", + "isArray", + "for ( var i", + "stack", + "isArray", + "<<<", + "lists", + "parentDi", + "debugger", + "parentDialect", + "parent", + "Maruku", + "\"list:\"", + "extract_attr", + "custom_tree", + "toTree", + "block = ", + "console.", + "console.log", + "debug_in", + "console.log", + "Gruber", + ".block", + "Gruber.block", + "block_meta", + "depen", + "Gruber", + "dialect", + "scripts", + "// End retu", + "<<<", + "testBaseName", + "Name", + "testBaeName", + "testName", + "t", + "testBasePath", + "test_path", + "path", + "test_path + tests[ t ]", + "test_name", + "isfile", + "( t, md )", + "(md)", + "asserts.same", + "dialect", + "asserts.same", + "toSource", + "diff", + "stringify", + "caller", + "asserts.deepEqual", + "same", + "meta", + "rbenv_dir", + "pending_execution?", + "pending", + "salt", + "Reorder", + "comlete", + ":sales_page", + "variant", + "task", + "listing", + "fee_multiplier", + " ] = ", + "fee_m", + "fee", + "fee_mul", + "fee_multi", + "auto_", + ":paypal_chang", + "paypal_change", + "change_paypal", + "change_to", + "when changing the paypal account", + "chang", + "payer_id", + "current_user" + ], + "highlight": true, + "in_selection": false, + "preserve_case": false, + "regex": false, + "replace_history": + [ + "\n", + "@listing.", + "product", + ".email", + ":payment_type => 'subscription'", + "***", + "#", + "broker_commission", + "tier_2_commission", + "affiliate", + "network", + "", + "refunds_count", + "affiliate_earnings", + "clicks_count", + "affiliate_earnings", + "clicks_count", + "refunds_count" + ], + "reverse": false, + "show_context": true, + "use_buffer2": true, + "whole_word": false, + "wrap": true + }, + "groups": + [ + { + "selected": 12, + "sheets": + [ + { + "buffer": 0, + "file": "lib/markdown.js", + "settings": + { + "buffer_size": 50548, + "regions": + { + }, + "selection": + [ + [ + 42874, + 42874 + ] + ], + "settings": + { + "syntax": "Packages/JavaScript/JavaScript.tmLanguage", + "tab_size": 2, + "translate_tabs_to_spaces": true + }, + "translation.x": 0.0, + "translation.y": 21357.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 1, + "file": "test/features/github/line_breaks.json", + "settings": + { + "buffer_size": 118, + "regions": + { + }, + "selection": + [ + [ + 106, + 106 + ] + ], + "settings": + { + "syntax": "Packages/JavaScript/JSON.tmLanguage" + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 2, + "file": "test/features/github/no_em_in_word.text", + "settings": 
+ { + "buffer_size": 91, + "regions": + { + }, + "selection": + [ + [ + 68, + 36 + ] + ], + "settings": + { + "syntax": "Packages/Text/Plain text.tmLanguage" + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 3, + "file": "test/features/github/no_em_in_word.json", + "settings": + { + "buffer_size": 183, + "regions": + { + }, + "selection": + [ + [ + 183, + 183 + ] + ], + "settings": + { + "syntax": "Packages/JavaScript/JSON.tmLanguage" + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 4, + "file": "test/features/github/auto_linking.text", + "settings": + { + "buffer_size": 54, + "regions": + { + }, + "selection": + [ + [ + 54, + 54 + ] + ], + "settings": + { + "syntax": "Packages/Text/Plain text.tmLanguage" + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 5, + "file": "test/features.t.js", + "settings": + { + "buffer_size": 2359, + "regions": + { + }, + "selection": + [ + [ + 1987, + 1987 + ] + ], + "settings": + { + "syntax": "Packages/JavaScript/JavaScript.tmLanguage", + "tab_size": 2, + "translate_tabs_to_spaces": true + }, + "translation.x": 0.0, + "translation.y": 618.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 6, + "file": "bin/md2html.js", + "settings": + { + "buffer_size": 1064, + "regions": + { + }, + "selection": + [ + [ + 724, + 724 + ] + ], + "settings": + { + "syntax": "Packages/JavaScript/JavaScript.tmLanguage", + "tab_size": 2, + "translate_tabs_to_spaces": true + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 7, + "file": "test/features/emphasis/simple.text", + "settings": + { + "buffer_size": 74, + "regions": + { + }, + "selection": + [ + [ + 0, + 0 + ] + ], + "settings": + { + "syntax": "Packages/Text/Plain text.tmLanguage" + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 0, + "file": "lib/markdown.js", + "settings": + { + "buffer_size": 50548, + "regions": + { + }, + "selection": + [ + [ + 32816, + 32816 + ] + ], + "settings": + { + "syntax": "Packages/JavaScript/JavaScript.tmLanguage", + "tab_size": 2, + "translate_tabs_to_spaces": true + }, + "translation.x": 0.0, + "translation.y": 16317.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 6, + "file": "bin/md2html.js", + "settings": + { + "buffer_size": 1064, + "regions": + { + }, + "selection": + [ + [ + 492, + 492 + ] + ], + "settings": + { + "syntax": "Packages/JavaScript/JavaScript.tmLanguage", + "tab_size": 2, + "translate_tabs_to_spaces": true + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 2, + "file": "test/features/github/no_em_in_word.text", + "settings": + { + "buffer_size": 91, + "regions": + { + }, + "selection": + [ + [ + 72, + 72 + ] + ], + "settings": + { + "syntax": "Packages/Text/Plain text.tmLanguage" + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 3, + "file": "test/features/github/no_em_in_word.json", + "settings": + { + "buffer_size": 183, + "regions": + { + }, + "selection": + [ + [ + 0, + 0 + ] + ], + "settings": + { + "syntax": "Packages/JavaScript/JSON.tmLanguage" + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 8, + "file": 
"Changes.markdown", + "settings": + { + "buffer_size": 646, + "regions": + { + }, + "selection": + [ + [ + 0, + 0 + ] + ], + "settings": + { + "spell_check": true, + "syntax": "Packages/Markdown/Markdown.tmLanguage" + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "type": "text" + }, + { + "buffer": 9, + "file": "package.json", + "settings": + { + "buffer_size": 1338, + "regions": + { + }, + "selection": + [ + [ + 45, + 45 + ] + ], + "settings": + { + "syntax": "Packages/JavaScript/JSON.tmLanguage", + "tab_size": 2, + "translate_tabs_to_spaces": true + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "type": "text" + } + ] + } + ], + "incremental_find": + { + "height": 32.0 + }, + "input": + { + "height": 29.0 + }, + "layout": + { + "cells": + [ + [ + 0, + 0, + 1, + 1 + ] + ], + "cols": + [ + 0.0, + 1.0 + ], + "rows": + [ + 0.0, + 1.0 + ] + }, + "menu_visible": true, + "replace": + { + "height": 62.0 + }, + "save_all_on_build": true, + "select_file": + { + "height": 0.0, + "selected_items": + [ + [ + "pack", + "package.json" + ], + [ + "changes", + "Changes.markdown" + ], + [ + "noemjson", + "test/features/github/no_em_in_word.json" + ], + [ + "eminwojso", + "test/features/github/no_em_in_word.json" + ], + [ + "emin", + "test/features/github/no_em_in_word.text" + ], + [ + "mark", + "lib/markdown.js" + ], + [ + "", + "test/features/github/no_em_in_word.json" + ], + [ + "emsimpletext", + "test/features/emphasis/simple.text" + ], + [ + "feature", + "test/features.t.js" + ], + [ + "md2", + "bin/md2html.js" + ], + [ + "featu", + "test/features.t.js" + ], + [ + "namp", + "bin/namp.js" + ], + [ + "metalead", + "test/features/meta/leading_whitespace.json" + ], + [ + "pac", + "package.json" + ], + [ + "fenced", + "test/features/github/fenced_code.json" + ], + [ + "smp", + "smpl" + ], + [ + "metamult", + "test/features/meta/multiple_classes.text" + ], + [ + "metamu", + "test/features/meta/multiple_classes.json" + ], + [ + "regre", + "test/regressions.t.js" + ], + [ + "cha", + "Changes.markdown" + ], + [ + "refwithim", + "test/features/links/ref_with_image_ref.json" + ], + [ + "linkimpl", + "test/features/links/implicit.text" + ], + [ + "int", + "test/interface.t.js" + ], + [ + "refwithimage", + "test/features/links/ref_with_image_ref.text" + ], + [ + "linkpareinlin", + "test/features/links/parens_inline.text" + ], + [ + "regres", + "test/regressions.t.js" + ], + [ + "imageref", + "test/features/images/ref.text" + ], + [ + "metalistjs", + "test/features/meta/list.json" + ], + [ + "metalist", + "test/features/meta/list.text" + ], + [ + "fea", + "test/features.t.js" + ], + [ + "read", + "README.markdown" + ], + [ + "feat", + "test/features.t.js" + ], + [ + "packa", + "package.json" + ], + [ + "testunsalte", + "test/unit/sale_test.rb" + ], + [ + "usertest", + "test/unit/user_test.rb" + ], + [ + "reportconttest", + "test/functional/reports_controller_test.rb" + ], + [ + "appmodesal", + "app/models/sale.rb" + ], + [ + "saletest", + "test/unit/sale_test.rb" + ], + [ + "testunuser", + "test/unit/user_test.rb" + ], + [ + "gem", + "Gemfile" + ], + [ + "user.r", + "app/models/user.rb" + ], + [ + "_check", + "app/views/manage/_checklist.html.erb" + ], + [ + "manaprod", + "app/controllers/manage/products_controller.rb" + ], + [ + "manvarcont", + "app/controllers/manage/variants_controller.rb" + ], + [ + "checkli", + "app/views/manage/_checklist.html.erb" + ], + [ + "ipanupd", + "lib/ipan_updater.rb" + ], + [ + "appjs", + 
"public/javascripts/application.js" + ], + [ + "listpublinotr", + "app/views/manage/listings/publishing/_not_ready.html.erb" + ], + [ + "testunprod", + "test/unit/product_test.rb" + ], + [ + "120402083309_add_share_columns_to_paym", + "db/migrate/20120402083309_add_share_columns_to_payment_plans.rb" + ], + [ + "var", + "app/models/variant.rb" + ], + [ + "modlist", + "app/models/listing.rb" + ], + [ + "modprod", + "app/models/product.rb" + ], + [ + "produtest", + "test/unit/product_test.rb" + ], + [ + "apphelpe", + "app/helpers/application_helper.rb" + ], + [ + "isting", + "app/models/listing.rb" + ], + [ + "manproductcont", + "app/controllers/manage/products_controller.rb" + ], + [ + "en.", + "config/locales/en.yml" + ], + [ + "modlisting", + "app/models/listing.rb" + ], + [ + "notread", + "app/views/manage/products/publishing/_not_ready.html.erb" + ], + [ + "listingnotread", + "app/views/manage/listings/publishing/_not_ready.html.erb" + ], + [ + "manlistincont", + "app/controllers/manage/listings_controller.rb" + ], + [ + "ipanupdatete", + "test/unit/ipan_updater_test.rb" + ], + [ + "pricea", + "lib/price_attributes.rb" + ], + [ + "disco", + "app/models/discount_code.rb" + ], + [ + "teundisccodete", + "test/unit/discount_code_test.rb" + ], + [ + "disccontest", + "test/functional/manage/discount_codes_controller_test.rb" + ], + [ + "vartest", + "test/unit/variant_test.rb" + ], + [ + "listinshow", + "app/views/manage/listings/show.html.erb" + ], + [ + "ipanup", + "lib/ipan_updater.rb" + ], + [ + "testunvartest", + "test/unit/variant_test.rb" + ], + [ + "user.rb", + "app/models/user.rb" + ], + [ + "20120329211108_add_fee_discount_to_variants.rb", + "db/migrate/20120329211108_add_fee_discount_to_variants.rb" + ], + [ + "refund", + "app/views/user_mailer/suspended_refund_failed.text.erb" + ], + [ + "en.yml", + "config/locales/en.yml" + ], + [ + "appmoduser", + "app/models/user.rb" + ], + [ + "apphelp", + "app/helpers/application_helper.rb" + ], + [ + "sesscon", + "app/controllers/sessions_controller.rb" + ], + [ + "testunpayp", + "test/unit/paypal_test.rb" + ], + [ + "bootstr", + "public/stylesheets/bootstrap.min.css" + ], + [ + "routes", + "config/routes.rb" + ], + [ + "usersconttest", + "test/functional/users_controller_test.rb" + ], + [ + "appviewsubper", + "app/views/users/subscription_permissions.html.erb" + ], + [ + "usercontr", + "app/controllers/users_controller.rb" + ], + [ + "list", + "app/models/listing.rb" + ], + [ + "appmodsale", + "app/models/sale.rb" + ], + [ + "payp", + "lib/pay_pal.rb" + ], + [ + "testfacpay", + "test/factories/payment_plan.rb" + ], + [ + "testpayplan", + "test/functional/api/ipan_v1_payment_plans_controller_test.rb" + ], + [ + "modesale", + "app/models/sale.rb" + ], + [ + "modelplan", + "app/models/payment_plan.rb" + ], + [ + "useraffpro", + "app/controllers/users/affiliate_programs_controller.rb" + ], + [ + "tesunidisc", + "test/unit/discount_code_test.rb" + ], + [ + "managdiscount", + "app/controllers/manage/discount_codes_controller.rb" + ], + [ + "appviewvarpri", + "app/views/manage/variants/_price.html.erb" + ], + [ + "rake", + "Rakefile" + ], + [ + "testfacdis", + "test/factories/discount_code.rb" + ], + [ + "discontest", + "test/unit/discount_code_test.rb" + ], + [ + "disc", + "app/models/discount_code.rb" + ], + [ + "29", + "tmp/cache/ipan:products/0/29.cache" + ], + [ + "moddiscount", + "app/models/discount_code.rb" + ], + [ + "modvar", + "app/models/variant.rb" + ], + [ + "appcont", + "app/controllers/application_controller.rb" + ], + [ + "data", + 
"config/database.yml" + ], + [ + "confroutes", + "config/routes.rb" + ], + [ + "envtest", + "config/environments/test.rb" + ], + [ + "appmodprod", + "app/models/product.rb" + ], + [ + "testipan", + "test/unit/ipan_updater_test.rb" + ], + [ + "ipanproducts", + "app/controllers/api/ipan/v1/products_controller.rb" + ], + [ + "test/functional/api/ipan_v1_users_controller_test.rb", + "test/functional/api/ipan_v1_users_controller_test.rb" + ], + [ + "testunprodte", + "test/unit/product_test.rb" + ], + [ + "ipaupdatest", + "test/unit/ipan_updater_test.rb" + ], + [ + "conenv", + "config/environment.rb" + ], + [ + "modediscou", + "app/models/discount_code.rb" + ], + [ + "apiuserscont", + "app/controllers/api/ipan/v1/users_controller.rb" + ], + [ + "testhelp", + "test/test_helper.rb" + ], + [ + "testfuncmanagaffcontest", + "test/functional/manage/affiliates_controller_test.rb" + ], + [ + "test/unit/listing_test.rb", + "test/unit/listing_test.rb" + ], + [ + "teshe", + "test/test_helper.rb" + ], + [ + "apviewusermailapp", + "app/views/user_mailer/affiliate_approved.text.erb" + ], + [ + "affproguser", + "app/models/affiliate_program_user.rb" + ], + [ + "modelaffprog", + "app/models/affiliate_program.rb" + ], + [ + "appmodlist", + "app/models/listing.rb" + ], + [ + "appmodaffprouser", + "app/models/affiliate_program_user.rb" + ], + [ + "appmailusermail", + "app/mailers/user_mailer.rb" + ], + [ + "testmarkcont", + "test/functional/marketplace_controller_test.rb" + ], + [ + "markp", + "app/controllers/marketplace_controller.rb" + ], + [ + "markcont", + "app/controllers/marketplace_controller.rb" + ] + ], + "width": 0.0 + }, + "select_project": + { + "height": 500.0, + "selected_items": + [ + [ + "", + "/Users/ash/Documents/sublime-projects/manager.sublime-project" + ], + [ + "ipan", + "/Users/ash/Documents/sublime-projects/ipan.sublime-project" + ], + [ + "dab", + "/Users/ash/Documents/sublime-projects/digiarticleblaster.sublime-project" + ], + [ + "ip", + "/Users/ash/Documents/sublime-projects/ipan.sublime-project" + ], + [ + "ipa", + "/Users/ash/Documents/sublime-projects/ipan.sublime-project" + ], + [ + "mn", + "/Users/ash/Documents/manager.sublime-project" + ] + ], + "width": 380.0 + }, + "show_minimap": true, + "show_open_files": false, + "show_tabs": true, + "side_bar_visible": true, + "side_bar_width": 218.0, + "status_bar_visible": true +} diff --git a/node_modules/markdown/node_modules/.bin/nopt b/node_modules/markdown/node_modules/.bin/nopt new file mode 120000 index 0000000..6b6566e --- /dev/null +++ b/node_modules/markdown/node_modules/.bin/nopt @@ -0,0 +1 @@ +../nopt/bin/nopt.js \ No newline at end of file diff --git a/node_modules/markdown/node_modules/nopt/.npmignore b/node_modules/markdown/node_modules/nopt/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/markdown/node_modules/nopt/LICENSE b/node_modules/markdown/node_modules/nopt/LICENSE new file mode 100644 index 0000000..05a4010 --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/LICENSE @@ -0,0 +1,23 @@ +Copyright 2009, 2010, 2011 Isaac Z. Schlueter. +All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/markdown/node_modules/nopt/README.md b/node_modules/markdown/node_modules/nopt/README.md new file mode 100644 index 0000000..f290da8 --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/README.md @@ -0,0 +1,210 @@ +If you want to write an option parser, and have it be good, there are +two ways to do it. The Right Way, and the Wrong Way. + +The Wrong Way is to sit down and write an option parser. We've all done +that. + +The Right Way is to write some complex configurable program with so many +options that you go half-insane just trying to manage them all, and put +it off with duct-tape solutions until you see exactly to the core of the +problem, and finally snap and write an awesome option parser. + +If you want to write an option parser, don't write an option parser. +Write a package manager, or a source control system, or a service +restarter, or an operating system. You probably won't end up with a +good one of those, but if you don't give up, and you are relentless and +diligent enough in your procrastination, you may just end up with a very +nice option parser. + +## USAGE + + // my-program.js + var nopt = require("nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + , "many" : [String, Array] + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag"] + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) + console.log(parsed) + +This would give you support for any of the following: + +```bash +$ node my-program.js --foo "blerp" --no-flag +{ "foo" : "blerp", "flag" : false } + +$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag +{ bar: 7, foo: "Mr. Hand", flag: true } + +$ node my-program.js --foo "blerp" -f -----p +{ foo: "blerp", flag: true, pick: true } + +$ node my-program.js -fp --foofoo +{ foo: "Mr. Foo", flag: true, pick: true } + +$ node my-program.js --foofoo -- -fp # -- stops the flag parsing. +{ foo: "Mr. Foo", argv: { remain: ["-fp"] } } + +$ node my-program.js --blatzk 1000 -fp # unknown opts are ok. 
+{ blatzk: 1000, flag: true, pick: true } + +$ node my-program.js --blatzk true -fp # but they need a value +{ blatzk: true, flag: true, pick: true } + +$ node my-program.js --no-blatzk -fp # unless they start with "no-" +{ blatzk: false, flag: true, pick: true } + +$ node my-program.js --baz b/a/z # known paths are resolved. +{ baz: "/Users/isaacs/b/a/z" } + +# if Array is one of the types, then it can take many +# values, and will always be an array. The other types provided +# specify what types are allowed in the list. + +$ node my-program.js --many 1 --many null --many foo +{ many: ["1", "null", "foo"] } + +$ node my-program.js --many foo +{ many: ["foo"] } +``` + +Read the tests at the bottom of `lib/nopt.js` for more examples of +what this puppy can do. + +## Types + +The following types are supported, and defined on `nopt.typeDefs` + +* String: A normal string. No parsing is done. +* path: A file system path. Gets resolved against cwd if not absolute. +* url: A url. If it doesn't parse, it isn't accepted. +* Number: Must be numeric. +* Date: Must parse as a date. If it does, and `Date` is one of the options, + then it will return a Date object, not a string. +* Boolean: Must be either `true` or `false`. If an option is a boolean, + then it does not need a value, and its presence will imply `true` as + the value. To negate boolean flags, do `--no-whatever` or `--whatever + false` +* NaN: Means that the option is strictly not allowed. Any value will + fail. +* Stream: An object matching the "Stream" class in node. Valuable + for use when validating programmatically. (npm uses this to let you + supply any WriteStream on the `outfd` and `logfd` config options.) +* Array: If `Array` is specified as one of the types, then the value + will be parsed as a list of options. This means that multiple values + can be specified, and that the value will always be an array. + +If a type is an array of values not on this list, then those are +considered valid values. For instance, in the example above, the +`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`, +and any other value will be rejected. + +When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be +interpreted as their JavaScript equivalents, and numeric values will be +interpreted as a number. + +You can also mix types and values, or multiple types, in a list. For +instance `{ blah: [Number, null] }` would allow a value to be set to +either a Number or null. When types are ordered, this implies a +preference, and the first type that can be used to properly interpret +the value will be used. + +To define a new type, add it to `nopt.typeDefs`. Each item in that +hash is an object with a `type` member and a `validate` method. The +`type` member is an object that matches what goes in the type list. The +`validate` method is a function that gets called with `validate(data, +key, val)`. Validate methods should assign `data[key]` to the valid +value of `val` if it can be handled properly, or return boolean +`false` if it cannot. + +You can also call `nopt.clean(data, types, typeDefs)` to clean up a +config object and remove its invalid properties. + +## Error Handling + +By default, nopt outputs a warning to standard error when invalid +options are found. You can change this behavior by assigning a method +to `nopt.invalidHandler`. This method will be called with +the offending `nopt.invalidHandler(key, val, types)`. + +If no `nopt.invalidHandler` is assigned, then it will console.error +its whining. 
If it is assigned to boolean `false` then the warning is +suppressed. + +## Abbreviations + +Yes, they are supported. If you define options like this: + +```javascript +{ "foolhardyelephants" : Boolean +, "pileofmonkeys" : Boolean } +``` + +Then this will work: + +```bash +node program.js --foolhar --pil +node program.js --no-f --pileofmon +# etc. +``` + +## Shorthands + +Shorthands are a hash of shorter option names to a snippet of args that +they expand to. + +If multiple one-character shorthands are all combined, and the +combination does not unambiguously match any other option or shorthand, +then they will be broken up into their constituent parts. For example: + +```json +{ "s" : ["--loglevel", "silent"] +, "g" : "--global" +, "f" : "--force" +, "p" : "--parseable" +, "l" : "--long" +} +``` + +```bash +npm ls -sgflp +# just like doing this: +npm ls --loglevel silent --global --force --long --parseable +``` + +## The Rest of the args + +The config object returned by nopt is given a special member called +`argv`, which is an object with the following fields: + +* `remain`: The remaining args after all the parsing has occurred. +* `original`: The args as they originally appeared. +* `cooked`: The args after flags and shorthands are expanded. + +## Slicing + +Node programs are called with more or less the exact argv as it appears +in C land, after the v8 and node-specific options have been plucked off. +As such, `argv[0]` is always `node` and `argv[1]` is always the +JavaScript program being run. + +That's usually not very useful to you. So they're sliced off by +default. If you want them, then you can pass in `0` as the last +argument, or any other number that you'd like to slice off the start of +the list. diff --git a/node_modules/markdown/node_modules/nopt/bin/nopt.js b/node_modules/markdown/node_modules/nopt/bin/nopt.js new file mode 100755 index 0000000..30e9fdb --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/bin/nopt.js @@ -0,0 +1,51 @@ +#!/usr/bin/env node +var nopt = require("../lib/nopt") + , types = { num: Number + , bool: Boolean + , help: Boolean + , list: Array + , "num-list": [Number, Array] + , "str-list": [String, Array] + , "bool-list": [Boolean, Array] + , str: String + , clear: Boolean + , config: Boolean + , length: Number + } + , shorthands = { s: [ "--str", "astring" ] + , b: [ "--bool" ] + , nb: [ "--no-bool" ] + , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ] + , "?": ["--help"] + , h: ["--help"] + , H: ["--help"] + , n: [ "--num", "125" ] + , c: ["--config"] + , l: ["--length"] + } + , parsed = nopt( types + , shorthands + , process.argv + , 2 ) + +console.log("parsed", parsed) + +if (parsed.help) { + console.log("") + console.log("nopt cli tester") + console.log("") + console.log("types") + console.log(Object.keys(types).map(function M (t) { + var type = types[t] + if (Array.isArray(type)) { + return [t, type.map(function (type) { return type.name })] + } + return [t, type && type.name] + }).reduce(function (s, i) { + s[i[0]] = i[1] + return s + }, {})) + console.log("") + console.log("shorthands") + console.log(shorthands) +} diff --git a/node_modules/markdown/node_modules/nopt/examples/my-program.js b/node_modules/markdown/node_modules/nopt/examples/my-program.js new file mode 100755 index 0000000..142447e --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/examples/my-program.js @@ -0,0 +1,30 @@ +#!/usr/bin/env node + +//process.env.DEBUG_NOPT = 1 + +// my-program.js +var nopt = require("../lib/nopt") + , Stream = 
require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag", "true"] + , "g" : ["--flag"] + , "s" : "--flag" + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) + +console.log("parsed =\n"+ require("util").inspect(parsed)) diff --git a/node_modules/markdown/node_modules/nopt/lib/nopt.js b/node_modules/markdown/node_modules/nopt/lib/nopt.js new file mode 100644 index 0000000..20f3b5b --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/lib/nopt.js @@ -0,0 +1,612 @@ +// info about each config option. + +var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG + ? function () { console.error.apply(console, arguments) } + : function () {} + +var url = require("url") + , path = require("path") + , Stream = require("stream").Stream + , abbrev = require("abbrev") + +module.exports = exports = nopt +exports.clean = clean + +exports.typeDefs = + { String : { type: String, validate: validateString } + , Boolean : { type: Boolean, validate: validateBoolean } + , url : { type: url, validate: validateUrl } + , Number : { type: Number, validate: validateNumber } + , path : { type: path, validate: validatePath } + , Stream : { type: Stream, validate: validateStream } + , Date : { type: Date, validate: validateDate } + } + +function nopt (types, shorthands, args, slice) { + args = args || process.argv + types = types || {} + shorthands = shorthands || {} + if (typeof slice !== "number") slice = 2 + + debug(types, shorthands, args, slice) + + args = args.slice(slice) + var data = {} + , key + , remain = [] + , cooked = args + , original = args.slice(0) + + parse(args, data, remain, types, shorthands) + // now data is full + clean(data, types, exports.typeDefs) + data.argv = {remain:remain,cooked:cooked,original:original} + Object.defineProperty(data.argv, 'toString', { value: function () { + return this.original.map(JSON.stringify).join(" ") + }, enumerable: false }) + return data +} + +function clean (data, types, typeDefs) { + typeDefs = typeDefs || exports.typeDefs + var remove = {} + , typeDefault = [false, true, null, String, Number, Array] + + Object.keys(data).forEach(function (k) { + if (k === "argv") return + var val = data[k] + , isArray = Array.isArray(val) + , type = types[k] + if (!isArray) val = [val] + if (!type) type = typeDefault + if (type === Array) type = typeDefault.concat(Array) + if (!Array.isArray(type)) type = [type] + + debug("val=%j", val) + debug("types=", type) + val = val.map(function (val) { + // if it's an unknown value, then parse false/true/null/numbers/dates + if (typeof val === "string") { + debug("string %j", val) + val = val.trim() + if ((val === "null" && ~type.indexOf(null)) + || (val === "true" && + (~type.indexOf(true) || ~type.indexOf(Boolean))) + || (val === "false" && + (~type.indexOf(false) || ~type.indexOf(Boolean)))) { + val = JSON.parse(val) + debug("jsonable %j", val) + } else if (~type.indexOf(Number) && !isNaN(val)) { + debug("convert to number", val) + val = +val + } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) { + debug("convert to date", val) + val = new Date(val) + } + } + + 
if (!types.hasOwnProperty(k)) { + return val + } + + // allow `--no-blah` to set 'blah' to null if null is allowed + if (val === false && ~type.indexOf(null) && + !(~type.indexOf(false) || ~type.indexOf(Boolean))) { + val = null + } + + var d = {} + d[k] = val + debug("prevalidated val", d, val, types[k]) + if (!validate(d, k, val, types[k], typeDefs)) { + if (exports.invalidHandler) { + exports.invalidHandler(k, val, types[k], data) + } else if (exports.invalidHandler !== false) { + debug("invalid: "+k+"="+val, types[k]) + } + return remove + } + debug("validated val", d, val, types[k]) + return d[k] + }).filter(function (val) { return val !== remove }) + + if (!val.length) delete data[k] + else if (isArray) { + debug(isArray, data[k], val) + data[k] = val + } else data[k] = val[0] + + debug("k=%s val=%j", k, val, data[k]) + }) +} + +function validateString (data, k, val) { + data[k] = String(val) +} + +function validatePath (data, k, val) { + data[k] = path.resolve(String(val)) + return true +} + +function validateNumber (data, k, val) { + debug("validate Number %j %j %j", k, val, isNaN(val)) + if (isNaN(val)) return false + data[k] = +val +} + +function validateDate (data, k, val) { + debug("validate Date %j %j %j", k, val, Date.parse(val)) + var s = Date.parse(val) + if (isNaN(s)) return false + data[k] = new Date(val) +} + +function validateBoolean (data, k, val) { + if (val instanceof Boolean) val = val.valueOf() + else if (typeof val === "string") { + if (!isNaN(val)) val = !!(+val) + else if (val === "null" || val === "false") val = false + else val = true + } else val = !!val + data[k] = val +} + +function validateUrl (data, k, val) { + val = url.parse(String(val)) + if (!val.host) return false + data[k] = val.href +} + +function validateStream (data, k, val) { + if (!(val instanceof Stream)) return false + data[k] = val +} + +function validate (data, k, val, type, typeDefs) { + // arrays are lists of types. + if (Array.isArray(type)) { + for (var i = 0, l = type.length; i < l; i ++) { + if (type[i] === Array) continue + if (validate(data, k, val, type[i], typeDefs)) return true + } + delete data[k] + return false + } + + // an array of anything? + if (type === Array) return true + + // NaN is poisonous. Means that something is not allowed. + if (type !== type) { + debug("Poison NaN", k, val, type) + delete data[k] + return false + } + + // explicit list of values + if (val === type) { + debug("Explicitly allowed %j", val) + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + return true + } + + // now go through the list of typeDefs, validate against each one. + var ok = false + , types = Object.keys(typeDefs) + for (var i = 0, l = types.length; i < l; i ++) { + debug("test type %j %j %j", k, val, types[i]) + var t = typeDefs[types[i]] + if (t && type === t.type) { + var d = {} + ok = false !== t.validate(d, k, val) + val = d[k] + if (ok) { + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + break + } + } + } + debug("OK? %j (%j %j %j)", ok, k, val, types[i]) + + if (!ok) delete data[k] + return ok +} + +function parse (args, data, remain, types, shorthands) { + debug("parse", args, data, remain) + + var key = null + , abbrevs = abbrev(Object.keys(types)) + , shortAbbr = abbrev(Object.keys(shorthands)) + + for (var i = 0; i < args.length; i ++) { + var arg = args[i] + debug("arg", arg) + + if (arg.match(/^-{2,}$/)) { + // done with keys. + // the rest are args. 
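+        // For example (as shown in the README above): with
+        //   ["--foo", "blerp", "--", "-fp"]
+        // parsing stops at "--", so "-fp" ends up in argv.remain instead
+        // of being expanded as shorthands.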
+ remain.push.apply(remain, args.slice(i + 1)) + args[i] = "--" + break + } + var hadEq = false + if (arg.charAt(0) === "-" && arg.length > 1) { + if (arg.indexOf("=") !== -1) { + hadEq = true + var v = arg.split("=") + arg = v.shift() + v = v.join("=") + args.splice.apply(args, [i, 1].concat([arg, v])) + } + + // see if it's a shorthand + // if so, splice and back up to re-parse it. + var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs) + debug("arg=%j shRes=%j", arg, shRes) + if (shRes) { + debug(arg, shRes) + args.splice.apply(args, [i, 1].concat(shRes)) + if (arg !== shRes[0]) { + i -- + continue + } + } + arg = arg.replace(/^-+/, "") + var no = null + while (arg.toLowerCase().indexOf("no-") === 0) { + no = !no + arg = arg.substr(3) + } + + if (abbrevs[arg]) arg = abbrevs[arg] + + var isArray = types[arg] === Array || + Array.isArray(types[arg]) && types[arg].indexOf(Array) !== -1 + + // allow unknown things to be arrays if specified multiple times. + if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) { + if (!Array.isArray(data[arg])) + data[arg] = [data[arg]] + isArray = true + } + + var val + , la = args[i + 1] + + var isBool = typeof no === 'boolean' || + types[arg] === Boolean || + Array.isArray(types[arg]) && types[arg].indexOf(Boolean) !== -1 || + (typeof types[arg] === 'undefined' && !hadEq) || + (la === "false" && + (types[arg] === null || + Array.isArray(types[arg]) && ~types[arg].indexOf(null))) + + if (isBool) { + // just set and move along + val = !no + // however, also support --bool true or --bool false + if (la === "true" || la === "false") { + val = JSON.parse(la) + la = null + if (no) val = !val + i ++ + } + + // also support "foo":[Boolean, "bar"] and "--foo bar" + if (Array.isArray(types[arg]) && la) { + if (~types[arg].indexOf(la)) { + // an explicit type + val = la + i ++ + } else if ( la === "null" && ~types[arg].indexOf(null) ) { + // null allowed + val = null + i ++ + } else if ( !la.match(/^-{2,}[^-]/) && + !isNaN(la) && + ~types[arg].indexOf(Number) ) { + // number + val = +la + i ++ + } else if ( !la.match(/^-[^-]/) && ~types[arg].indexOf(String) ) { + // string + val = la + i ++ + } + } + + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + continue + } + + if (la && la.match(/^-{2,}$/)) { + la = undefined + i -- + } + + val = la === undefined ? true : la + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + i ++ + continue + } + remain.push(arg) + } +} + +function resolveShort (arg, shorthands, shortAbbr, abbrevs) { + // handle single-char shorthands glommed together, like + // npm ls -glp, but only if there is one dash, and only if + // all of the chars are single-char shorthands, and it's + // not a match to some other abbrev. 
+ arg = arg.replace(/^-+/, '') + + // if it's an exact known option, then don't go any further + if (abbrevs[arg] === arg) + return null + + // if it's an exact known shortopt, same deal + if (shorthands[arg]) { + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] + } + + // first check to see if this arg is a set of single-char shorthands + var singles = shorthands.___singles + if (!singles) { + singles = Object.keys(shorthands).filter(function (s) { + return s.length === 1 + }).reduce(function (l,r) { + l[r] = true + return l + }, {}) + shorthands.___singles = singles + debug('shorthand singles', singles) + } + + var chrs = arg.split("").filter(function (c) { + return singles[c] + }) + + if (chrs.join("") === arg) return chrs.map(function (c) { + return shorthands[c] + }).reduce(function (l, r) { + return l.concat(r) + }, []) + + + // if it's an arg abbrev, and not a literal shorthand, then prefer the arg + if (abbrevs[arg] && !shorthands[arg]) + return null + + // if it's an abbr for a shorthand, then use that + if (shortAbbr[arg]) + arg = shortAbbr[arg] + + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] +} + +if (module === require.main) { +var assert = require("assert") + , util = require("util") + + , shorthands = + { s : ["--loglevel", "silent"] + , d : ["--loglevel", "info"] + , dd : ["--loglevel", "verbose"] + , ddd : ["--loglevel", "silly"] + , noreg : ["--no-registry"] + , reg : ["--registry"] + , "no-reg" : ["--no-registry"] + , silent : ["--loglevel", "silent"] + , verbose : ["--loglevel", "verbose"] + , h : ["--usage"] + , H : ["--usage"] + , "?" 
: ["--usage"] + , help : ["--usage"] + , v : ["--version"] + , f : ["--force"] + , desc : ["--description"] + , "no-desc" : ["--no-description"] + , "local" : ["--no-global"] + , l : ["--long"] + , p : ["--parseable"] + , porcelain : ["--parseable"] + , g : ["--global"] + } + + , types = + { aoa: Array + , nullstream: [null, Stream] + , date: Date + , str: String + , browser : String + , cache : path + , color : ["always", Boolean] + , depth : Number + , description : Boolean + , dev : Boolean + , editor : path + , force : Boolean + , global : Boolean + , globalconfig : path + , group : [String, Number] + , gzipbin : String + , logfd : [Number, Stream] + , loglevel : ["silent","win","error","warn","info","verbose","silly"] + , long : Boolean + , "node-version" : [false, String] + , npaturl : url + , npat : Boolean + , "onload-script" : [false, String] + , outfd : [Number, Stream] + , parseable : Boolean + , pre: Boolean + , prefix: path + , proxy : url + , "rebuild-bundle" : Boolean + , registry : url + , searchopts : String + , searchexclude: [null, String] + , shell : path + , t: [Array, String] + , tag : String + , tar : String + , tmp : path + , "unsafe-perm" : Boolean + , usage : Boolean + , user : String + , username : String + , userconfig : path + , version : Boolean + , viewer: path + , _exit : Boolean + } + +; [["-v", {version:true}, []] + ,["---v", {version:true}, []] + ,["ls -s --no-reg connect -d", + {loglevel:"info",registry:null},["ls","connect"]] + ,["ls ---s foo",{loglevel:"silent"},["ls","foo"]] + ,["ls --registry blargle", {}, ["ls"]] + ,["--no-registry", {registry:null}, []] + ,["--no-color true", {color:false}, []] + ,["--no-color false", {color:true}, []] + ,["--no-color", {color:false}, []] + ,["--color false", {color:false}, []] + ,["--color --logfd 7", {logfd:7,color:true}, []] + ,["--color=true", {color:true}, []] + ,["--logfd=10", {logfd:10}, []] + ,["--tmp=/tmp -tar=gtar",{tmp:"/tmp",tar:"gtar"},[]] + ,["--tmp=tmp -tar=gtar", + {tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]] + ,["--logfd x", {}, []] + ,["a -true -- -no-false", {true:true},["a","-no-false"]] + ,["a -no-false", {false:false},["a"]] + ,["a -no-no-true", {true:true}, ["a"]] + ,["a -no-no-no-false", {false:false}, ["a"]] + ,["---NO-no-No-no-no-no-nO-no-no"+ + "-No-no-no-no-no-no-no-no-no"+ + "-no-no-no-no-NO-NO-no-no-no-no-no-no"+ + "-no-body-can-do-the-boogaloo-like-I-do" + ,{"body-can-do-the-boogaloo-like-I-do":false}, []] + ,["we are -no-strangers-to-love "+ + "--you-know=the-rules --and=so-do-i "+ + "---im-thinking-of=a-full-commitment "+ + "--no-you-would-get-this-from-any-other-guy "+ + "--no-gonna-give-you-up "+ + "-no-gonna-let-you-down=true "+ + "--no-no-gonna-run-around false "+ + "--desert-you=false "+ + "--make-you-cry false "+ + "--no-tell-a-lie "+ + "--no-no-and-hurt-you false" + ,{"strangers-to-love":false + ,"you-know":"the-rules" + ,"and":"so-do-i" + ,"you-would-get-this-from-any-other-guy":false + ,"gonna-give-you-up":false + ,"gonna-let-you-down":false + ,"gonna-run-around":false + ,"desert-you":false + ,"make-you-cry":false + ,"tell-a-lie":false + ,"and-hurt-you":false + },["we", "are"]] + ,["-t one -t two -t three" + ,{t: ["one", "two", "three"]} + ,[]] + ,["-t one -t null -t three four five null" + ,{t: ["one", "null", "three"]} + ,["four", "five", "null"]] + ,["-t foo" + ,{t:["foo"]} + ,[]] + ,["--no-t" + ,{t:["false"]} + ,[]] + ,["-no-no-t" + ,{t:["true"]} + ,[]] + ,["-aoa one -aoa null -aoa 100" + ,{aoa:["one", null, 100]} + ,[]] + ,["-str 100" + ,{str:"100"} + ,[]] + 
,["--color always" + ,{color:"always"} + ,[]] + ,["--no-nullstream" + ,{nullstream:null} + ,[]] + ,["--nullstream false" + ,{nullstream:null} + ,[]] + ,["--notadate=2011-01-25" + ,{notadate: "2011-01-25"} + ,[]] + ,["--date 2011-01-25" + ,{date: new Date("2011-01-25")} + ,[]] + ,["-cl 1" + ,{config: true, length: 1} + ,[] + ,{config: Boolean, length: Number, clear: Boolean} + ,{c: "--config", l: "--length"}] + ,["--acount bla" + ,{"acount":true} + ,["bla"] + ,{account: Boolean, credentials: Boolean, options: String} + ,{a:"--account", c:"--credentials",o:"--options"}] + ,["--clear" + ,{clear:true} + ,[] + ,{clear:Boolean,con:Boolean,len:Boolean,exp:Boolean,add:Boolean,rep:Boolean} + ,{c:"--con",l:"--len",e:"--exp",a:"--add",r:"--rep"}] + ,["--file -" + ,{"file":"-"} + ,[] + ,{file:String} + ,{}] + ,["--file -" + ,{"file":true} + ,["-"] + ,{file:Boolean} + ,{}] + ].forEach(function (test) { + var argv = test[0].split(/\s+/) + , opts = test[1] + , rem = test[2] + , actual = nopt(test[3] || types, test[4] || shorthands, argv, 0) + , parsed = actual.argv + delete actual.argv + console.log(util.inspect(actual, false, 2, true), parsed.remain) + for (var i in opts) { + var e = JSON.stringify(opts[i]) + , a = JSON.stringify(actual[i] === undefined ? null : actual[i]) + if (e && typeof e === "object") { + assert.deepEqual(e, a) + } else { + assert.equal(e, a) + } + } + assert.deepEqual(rem, parsed.remain) + }) +} diff --git a/node_modules/markdown/node_modules/nopt/node_modules/abbrev/CONTRIBUTING.md b/node_modules/markdown/node_modules/nopt/node_modules/abbrev/CONTRIBUTING.md new file mode 100644 index 0000000..2f30261 --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/node_modules/abbrev/CONTRIBUTING.md @@ -0,0 +1,3 @@ + To get started, sign the + Contributor License Agreement. diff --git a/node_modules/markdown/node_modules/nopt/node_modules/abbrev/LICENSE b/node_modules/markdown/node_modules/nopt/node_modules/abbrev/LICENSE new file mode 100644 index 0000000..05a4010 --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/node_modules/abbrev/LICENSE @@ -0,0 +1,23 @@ +Copyright 2009, 2010, 2011 Isaac Z. Schlueter. +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/markdown/node_modules/nopt/node_modules/abbrev/README.md b/node_modules/markdown/node_modules/nopt/node_modules/abbrev/README.md new file mode 100644 index 0000000..99746fe --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/node_modules/abbrev/README.md @@ -0,0 +1,23 @@ +# abbrev-js + +Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev). + +Usage: + + var abbrev = require("abbrev"); + abbrev("foo", "fool", "folding", "flop"); + + // returns: + { fl: 'flop' + , flo: 'flop' + , flop: 'flop' + , fol: 'folding' + , fold: 'folding' + , foldi: 'folding' + , foldin: 'folding' + , folding: 'folding' + , foo: 'foo' + , fool: 'fool' + } + +This is handy for command-line scripts, or other cases where you want to be able to accept shorthands. diff --git a/node_modules/markdown/node_modules/nopt/node_modules/abbrev/abbrev.js b/node_modules/markdown/node_modules/nopt/node_modules/abbrev/abbrev.js new file mode 100644 index 0000000..69cfeac --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/node_modules/abbrev/abbrev.js @@ -0,0 +1,62 @@ + +module.exports = exports = abbrev.abbrev = abbrev + +abbrev.monkeyPatch = monkeyPatch + +function monkeyPatch () { + Object.defineProperty(Array.prototype, 'abbrev', { + value: function () { return abbrev(this) }, + enumerable: false, configurable: true, writable: true + }) + + Object.defineProperty(Object.prototype, 'abbrev', { + value: function () { return abbrev(Object.keys(this)) }, + enumerable: false, configurable: true, writable: true + }) +} + +function abbrev (list) { + if (arguments.length !== 1 || !Array.isArray(list)) { + list = Array.prototype.slice.call(arguments, 0) + } + for (var i = 0, l = list.length, args = [] ; i < l ; i ++) { + args[i] = typeof list[i] === "string" ? list[i] : String(list[i]) + } + + // sort them lexicographically, so that they're next to their nearest kin + args = args.sort(lexSort) + + // walk through each, seeing how much it has in common with the next and previous + var abbrevs = {} + , prev = "" + for (var i = 0, l = args.length ; i < l ; i ++) { + var current = args[i] + , next = args[i + 1] || "" + , nextMatches = true + , prevMatches = true + if (current === next) continue + for (var j = 0, cl = current.length ; j < cl ; j ++) { + var curChar = current.charAt(j) + nextMatches = nextMatches && curChar === next.charAt(j) + prevMatches = prevMatches && curChar === prev.charAt(j) + if (!nextMatches && !prevMatches) { + j ++ + break + } + } + prev = current + if (j === cl) { + abbrevs[current] = current + continue + } + for (var a = current.substr(0, j) ; j <= cl ; j ++) { + abbrevs[a] = current + a += current.charAt(j) + } + } + return abbrevs +} + +function lexSort (a, b) { + return a === b ? 0 : a > b ? 1 : -1 +} diff --git a/node_modules/markdown/node_modules/nopt/node_modules/abbrev/package.json b/node_modules/markdown/node_modules/nopt/node_modules/abbrev/package.json new file mode 100644 index 0000000..d6d0897 --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/node_modules/abbrev/package.json @@ -0,0 +1,29 @@ +{ + "name": "abbrev", + "version": "1.0.5", + "description": "Like ruby's abbrev module, but in js", + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me" + }, + "main": "abbrev.js", + "scripts": { + "test": "node test.js" + }, + "repository": { + "type": "git", + "url": "http://github.com/isaacs/abbrev-js" + }, + "license": { + "type": "MIT", + "url": "https://github.com/isaacs/abbrev-js/raw/master/LICENSE" + }, + "readme": "# abbrev-js\n\nJust like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).\n\nUsage:\n\n var abbrev = require(\"abbrev\");\n abbrev(\"foo\", \"fool\", \"folding\", \"flop\");\n \n // returns:\n { fl: 'flop'\n , flo: 'flop'\n , flop: 'flop'\n , fol: 'folding'\n , fold: 'folding'\n , foldi: 'folding'\n , foldin: 'folding'\n , folding: 'folding'\n , foo: 'foo'\n , fool: 'fool'\n }\n\nThis is handy for command-line scripts, or other cases where you want to be able to accept shorthands.\n", + "readmeFilename": "README.md", + "_id": "abbrev@1.0.5", + "dist": { + "shasum": "8878621df7d367d2b65a37fd163e59df351fbfa4" + }, + "_from": "abbrev@1", + "_resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.5.tgz" +} diff --git a/node_modules/markdown/node_modules/nopt/node_modules/abbrev/test.js b/node_modules/markdown/node_modules/nopt/node_modules/abbrev/test.js new file mode 100644 index 0000000..d5a7303 --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/node_modules/abbrev/test.js @@ -0,0 +1,47 @@ +var abbrev = require('./abbrev.js') +var assert = require("assert") +var util = require("util") + +console.log("TAP Version 13") +var count = 0 + +function test (list, expect) { + count++ + var actual = abbrev(list) + assert.deepEqual(actual, expect, + "abbrev("+util.inspect(list)+") === " + util.inspect(expect) + "\n"+ + "actual: "+util.inspect(actual)) + actual = abbrev.apply(exports, list) + assert.deepEqual(abbrev.apply(exports, list), expect, + "abbrev("+list.map(JSON.stringify).join(",")+") === " + util.inspect(expect) + "\n"+ + "actual: "+util.inspect(actual)) + console.log('ok - ' + list.join(' ')) +} + +test([ "ruby", "ruby", "rules", "rules", "rules" ], +{ rub: 'ruby' +, ruby: 'ruby' +, rul: 'rules' +, rule: 'rules' +, rules: 'rules' +}) +test(["fool", "foom", "pool", "pope"], +{ fool: 'fool' +, foom: 'foom' +, poo: 'pool' +, pool: 'pool' +, pop: 'pope' +, pope: 'pope' +}) +test(["a", "ab", "abc", "abcd", "abcde", "acde"], +{ a: 'a' +, ab: 'ab' +, abc: 'abc' +, abcd: 'abcd' +, abcde: 'abcde' +, ac: 'acde' +, acd: 'acde' +, acde: 'acde' +}) + +console.log("0..%d", count) diff --git a/node_modules/markdown/node_modules/nopt/package.json b/node_modules/markdown/node_modules/nopt/package.json new file mode 100644 index 0000000..be74de9 --- /dev/null +++ b/node_modules/markdown/node_modules/nopt/package.json @@ -0,0 +1,36 @@ +{ + "name": "nopt", + "version": "2.1.2", + "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "main": "lib/nopt.js", + "scripts": { + "test": "node lib/nopt.js" + }, + "repository": { + "type": "git", + "url": "http://github.com/isaacs/nopt" + }, + "bin": { + "nopt": "./bin/nopt.js" + }, + "license": { + "type": "MIT", + "url": "https://github.com/isaacs/nopt/raw/master/LICENSE" + }, + "dependencies": { + "abbrev": "1" + }, + "readme": "If you want to write an option parser, and have it be good, there are\ntwo ways to do it. The Right Way, and the Wrong Way.\n\nThe Wrong Way is to sit down and write an option parser. 
We've all done\nthat.\n\nThe Right Way is to write some complex configurable program with so many\noptions that you go half-insane just trying to manage them all, and put\nit off with duct-tape solutions until you see exactly to the core of the\nproblem, and finally snap and write an awesome option parser.\n\nIf you want to write an option parser, don't write an option parser.\nWrite a package manager, or a source control system, or a service\nrestarter, or an operating system. You probably won't end up with a\ngood one of those, but if you don't give up, and you are relentless and\ndiligent enough in your procrastination, you may just end up with a very\nnice option parser.\n\n## USAGE\n\n // my-program.js\n var nopt = require(\"nopt\")\n , Stream = require(\"stream\").Stream\n , path = require(\"path\")\n , knownOpts = { \"foo\" : [String, null]\n , \"bar\" : [Stream, Number]\n , \"baz\" : path\n , \"bloo\" : [ \"big\", \"medium\", \"small\" ]\n , \"flag\" : Boolean\n , \"pick\" : Boolean\n , \"many\" : [String, Array]\n }\n , shortHands = { \"foofoo\" : [\"--foo\", \"Mr. Foo\"]\n , \"b7\" : [\"--bar\", \"7\"]\n , \"m\" : [\"--bloo\", \"medium\"]\n , \"p\" : [\"--pick\"]\n , \"f\" : [\"--flag\"]\n }\n // everything is optional.\n // knownOpts and shorthands default to {}\n // arg list defaults to process.argv\n // slice defaults to 2\n , parsed = nopt(knownOpts, shortHands, process.argv, 2)\n console.log(parsed)\n\nThis would give you support for any of the following:\n\n```bash\n$ node my-program.js --foo \"blerp\" --no-flag\n{ \"foo\" : \"blerp\", \"flag\" : false }\n\n$ node my-program.js ---bar 7 --foo \"Mr. Hand\" --flag\n{ bar: 7, foo: \"Mr. Hand\", flag: true }\n\n$ node my-program.js --foo \"blerp\" -f -----p\n{ foo: \"blerp\", flag: true, pick: true }\n\n$ node my-program.js -fp --foofoo\n{ foo: \"Mr. Foo\", flag: true, pick: true }\n\n$ node my-program.js --foofoo -- -fp # -- stops the flag parsing.\n{ foo: \"Mr. Foo\", argv: { remain: [\"-fp\"] } }\n\n$ node my-program.js --blatzk 1000 -fp # unknown opts are ok.\n{ blatzk: 1000, flag: true, pick: true }\n\n$ node my-program.js --blatzk true -fp # but they need a value\n{ blatzk: true, flag: true, pick: true }\n\n$ node my-program.js --no-blatzk -fp # unless they start with \"no-\"\n{ blatzk: false, flag: true, pick: true }\n\n$ node my-program.js --baz b/a/z # known paths are resolved.\n{ baz: \"/Users/isaacs/b/a/z\" }\n\n# if Array is one of the types, then it can take many\n# values, and will always be an array. The other types provided\n# specify what types are allowed in the list.\n\n$ node my-program.js --many 1 --many null --many foo\n{ many: [\"1\", \"null\", \"foo\"] }\n\n$ node my-program.js --many foo\n{ many: [\"foo\"] }\n```\n\nRead the tests at the bottom of `lib/nopt.js` for more examples of\nwhat this puppy can do.\n\n## Types\n\nThe following types are supported, and defined on `nopt.typeDefs`\n\n* String: A normal string. No parsing is done.\n* path: A file system path. Gets resolved against cwd if not absolute.\n* url: A url. If it doesn't parse, it isn't accepted.\n* Number: Must be numeric.\n* Date: Must parse as a date. If it does, and `Date` is one of the options,\n then it will return a Date object, not a string.\n* Boolean: Must be either `true` or `false`. If an option is a boolean,\n then it does not need a value, and its presence will imply `true` as\n the value. To negate boolean flags, do `--no-whatever` or `--whatever\n false`\n* NaN: Means that the option is strictly not allowed. 
Any value will\n fail.\n* Stream: An object matching the \"Stream\" class in node. Valuable\n for use when validating programmatically. (npm uses this to let you\n supply any WriteStream on the `outfd` and `logfd` config options.)\n* Array: If `Array` is specified as one of the types, then the value\n will be parsed as a list of options. This means that multiple values\n can be specified, and that the value will always be an array.\n\nIf a type is an array of values not on this list, then those are\nconsidered valid values. For instance, in the example above, the\n`--bloo` option can only be one of `\"big\"`, `\"medium\"`, or `\"small\"`,\nand any other value will be rejected.\n\nWhen parsing unknown fields, `\"true\"`, `\"false\"`, and `\"null\"` will be\ninterpreted as their JavaScript equivalents, and numeric values will be\ninterpreted as a number.\n\nYou can also mix types and values, or multiple types, in a list. For\ninstance `{ blah: [Number, null] }` would allow a value to be set to\neither a Number or null. When types are ordered, this implies a\npreference, and the first type that can be used to properly interpret\nthe value will be used.\n\nTo define a new type, add it to `nopt.typeDefs`. Each item in that\nhash is an object with a `type` member and a `validate` method. The\n`type` member is an object that matches what goes in the type list. The\n`validate` method is a function that gets called with `validate(data,\nkey, val)`. Validate methods should assign `data[key]` to the valid\nvalue of `val` if it can be handled properly, or return boolean\n`false` if it cannot.\n\nYou can also call `nopt.clean(data, types, typeDefs)` to clean up a\nconfig object and remove its invalid properties.\n\n## Error Handling\n\nBy default, nopt outputs a warning to standard error when invalid\noptions are found. You can change this behavior by assigning a method\nto `nopt.invalidHandler`. This method will be called with\nthe offending `nopt.invalidHandler(key, val, types)`.\n\nIf no `nopt.invalidHandler` is assigned, then it will console.error\nits whining. If it is assigned to boolean `false` then the warning is\nsuppressed.\n\n## Abbreviations\n\nYes, they are supported. If you define options like this:\n\n```javascript\n{ \"foolhardyelephants\" : Boolean\n, \"pileofmonkeys\" : Boolean }\n```\n\nThen this will work:\n\n```bash\nnode program.js --foolhar --pil\nnode program.js --no-f --pileofmon\n# etc.\n```\n\n## Shorthands\n\nShorthands are a hash of shorter option names to a snippet of args that\nthey expand to.\n\nIf multiple one-character shorthands are all combined, and the\ncombination does not unambiguously match any other option or shorthand,\nthen they will be broken up into their constituent parts. 
For example:\n\n```json\n{ \"s\" : [\"--loglevel\", \"silent\"]\n, \"g\" : \"--global\"\n, \"f\" : \"--force\"\n, \"p\" : \"--parseable\"\n, \"l\" : \"--long\"\n}\n```\n\n```bash\nnpm ls -sgflp\n# just like doing this:\nnpm ls --loglevel silent --global --force --long --parseable\n```\n\n## The Rest of the args\n\nThe config object returned by nopt is given a special member called\n`argv`, which is an object with the following fields:\n\n* `remain`: The remaining args after all the parsing has occurred.\n* `original`: The args as they originally appeared.\n* `cooked`: The args after flags and shorthands are expanded.\n\n## Slicing\n\nNode programs are called with more or less the exact argv as it appears\nin C land, after the v8 and node-specific options have been plucked off.\nAs such, `argv[0]` is always `node` and `argv[1]` is always the\nJavaScript program being run.\n\nThat's usually not very useful to you. So they're sliced off by\ndefault. If you want them, then you can pass in `0` as the last\nargument, or any other number that you'd like to slice off the start of\nthe list.\n", + "readmeFilename": "README.md", + "_id": "nopt@2.1.2", + "dist": { + "shasum": "6240b68bda61a6ba5e2a688e3782227328f492cd" + }, + "_from": "nopt@~2.1.1", + "_resolved": "https://registry.npmjs.org/nopt/-/nopt-2.1.2.tgz" +} diff --git a/node_modules/markdown/package.json b/node_modules/markdown/package.json new file mode 100644 index 0000000..1e897ed --- /dev/null +++ b/node_modules/markdown/package.json @@ -0,0 +1,68 @@ +{ + "name": "markdown", + "version": "0.5.0", + "description": "A sensible Markdown parser for javascript", + "keywords": [ + "markdown", + "text processing", + "ast" + ], + "maintainers": [ + { + "name": "Dominic Baggott", + "email": "dominic.baggott@gmail.com", + "url": "http://evilstreak.co.uk" + }, + { + "name": "Ash Berlin", + "email": "ash_markdownjs@firemirror.com", + "url": "http://ashberlin.com" + } + ], + "contributors": [ + { + "name": "Dominic Baggott", + "email": "dominic.baggott@gmail.com", + "url": "http://evilstreak.co.uk" + }, + { + "name": "Ash Berlin", + "email": "ash_markdownjs@firemirror.com", + "url": "http://ashberlin.com" + } + ], + "bugs": { + "url": "http://github.com/evilstreak/markdown-js/issues" + }, + "licenses": [ + { + "type": "MIT", + "url": "http://www.opensource.org/licenses/mit-license.php" + } + ], + "repository": { + "type": "git", + "url": "git://github.com/evilstreak/markdown-js.git" + }, + "main": "./lib/index.js", + "bin": { + "md2html": "./bin/md2html.js" + }, + "dependencies": { + "nopt": "~2.1.1" + }, + "devDependencies": { + "tap": "~0.3.3" + }, + "scripts": { + "test": "tap ./test/*.t.js" + }, + "readme": "# markdown-js\n\nYet another markdown parser, this time for JavaScript. There's a few\noptions that precede this project but they all treat markdown to HTML\nconversion as a single step process. You pass markdown in and get HTML\nout, end of story. We had some pretty particular views on how the\nprocess should actually look, which include:\n\n * producing well-formed HTML. This means that `em` and `strong` nesting\n is important, as is the ability to output as both HTML and XHTML\n\n * having an intermediate representation to allow processing of parsed\n data (we in fact have two, both [JsonML]: a markdown tree and an HTML tree)\n\n * being easily extensible to add new dialects without having to\n rewrite the entire parsing mechanics\n\n * having a good test suite. 
The only test suites we could find tested\n massive blocks of input, and passing depended on outputting the HTML\n with exactly the same whitespace as the original implementation\n\n[JsonML]: http://jsonml.org/ \"JSON Markup Language\"\n\n## Installation\n\nJust the `markdown` library:\n\n npm install markdown\n\nOptionally, install `md2html` into your path\n\n npm install -g markdown\n\n## Usage\n\n### Node\n\nThe simple way to use it with node is:\n\n```js\nvar markdown = require( \"markdown\" ).markdown;\nconsole.log( markdown.toHTML( \"Hello *World*!\" ) );\n```\n\n### Browser\n\nIt also works in a browser; here is a complete example:\n\n```html\n\n\n \n \n
    \n \n \n \n\n```\n\n### Command line\n\nAssuming you've installed the `md2html` script (see Installation,\nabove), you can convert markdown to html:\n\n```bash\n# read from a file\nmd2html /path/to/doc.md > /path/to/doc.html\n\n# or from stdin\necho 'Hello *World*!' | md2html\n```\n\n### More options\n\nIf you want more control check out the documentation in\n[lib/markdown.js] which details all the methods and parameters\navailable (including examples!). One day we'll get the docs generated\nand hosted somewhere for nicer browsing.\n\n[lib/markdown.js]: http://github.com/evilstreak/markdown-js/blob/master/lib/markdown.js\n\nMeanwhile, here's an example of using the multi-step processing to\nmake wiki-style linking work by filling in missing link references:\n\n```js\nvar md = require( \"markdown\" ).markdown,\n text = \"[Markdown] is a simple text-based [markup language]\\n\" +\n \"created by [John Gruber]\\n\\n\" +\n \"[John Gruber]: http://daringfireball.net\";\n\n// parse the markdown into a tree and grab the link references\nvar tree = md.parse( text ),\n refs = tree[ 1 ].references;\n\n// iterate through the tree finding link references\n( function find_link_refs( jsonml ) {\n if ( jsonml[ 0 ] === \"link_ref\" ) {\n var ref = jsonml[ 1 ].ref;\n\n // if there's no reference, define a wiki link\n if ( !refs[ ref ] ) {\n refs[ ref ] = {\n href: \"http://en.wikipedia.org/wiki/\" + ref.replace(/\\s+/, \"_\" )\n };\n }\n }\n else if ( Array.isArray( jsonml[ 1 ] ) ) {\n jsonml[ 1 ].forEach( find_link_refs );\n }\n else if ( Array.isArray( jsonml[ 2 ] ) ) {\n jsonml[ 2 ].forEach( find_link_refs );\n }\n} )( tree );\n\n// convert the tree into html\nvar html = md.renderJsonML( md.toHTMLTree( tree ) );\nconsole.log( html );\n```\n\n## Intermediate Representation\n\nInternally the process to convert a chunk of markdown into a chunk of\nHTML has three steps:\n\n 1. Parse the markdown into a JsonML tree. Any references found in the\n parsing are stored in the attribute hash of the root node under the\n key `references`.\n\n 2. Convert the markdown tree into an HTML tree. Rename any nodes that\n need it (`bulletlist` to `ul` for example) and lookup any references\n used by links or images. Remove the references attribute once done.\n\n 3. Stringify the HTML tree being careful not to wreck whitespace where\n whitespace is important (surrounding inline elements for example).\n\nEach step of this process can be called individually if you need to do\nsome processing or modification of the data at an intermediate stage.\nFor example, you may want to grab a list of all URLs linked to in the\ndocument before rendering it to HTML which you could do by recursing\nthrough the HTML tree looking for `a` nodes.\n\n## Running tests\n\nTo run the tests under node you will need tap installed (it's listed as a\n`devDependencies` so `npm install` from the checkout should be enough), then do\n\n $ npm test\n\n## Contributing\n\nDo the usual github fork and pull request dance. 
Add yourself to the\ncontributors section of [package.json](/package.json) too if you want to.\n\n## License\n\nReleased under the MIT license.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", + "readmeFilename": "README.markdown", + "_id": "markdown@0.5.0", + "dist": { + "shasum": "cbbcf5d76bc98b666bfbf9a075e9ce8a2a6f1d07" + }, + "_from": "markdown@", + "_resolved": "https://registry.npmjs.org/markdown/-/markdown-0.5.0.tgz" +} diff --git a/node_modules/markdown/seed.yml b/node_modules/markdown/seed.yml new file mode 100644 index 0000000..a15b229 --- /dev/null +++ b/node_modules/markdown/seed.yml @@ -0,0 +1,5 @@ +--- + name: markdown-js + description: JavaScript implementation of Markdown + tags: markdown parser + version: 0.1.2 diff --git a/node_modules/pushover-notifications/.npmignore b/node_modules/pushover-notifications/.npmignore new file mode 100644 index 0000000..c8f50f7 --- /dev/null +++ b/node_modules/pushover-notifications/.npmignore @@ -0,0 +1 @@ +npm-debug.log diff --git a/node_modules/pushover-notifications/LICENSE b/node_modules/pushover-notifications/LICENSE new file mode 100644 index 0000000..b7bcabb --- /dev/null +++ b/node_modules/pushover-notifications/LICENSE @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2012 Aaron Bieber + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ */ + diff --git a/node_modules/pushover-notifications/README.md b/node_modules/pushover-notifications/README.md new file mode 100644 index 0000000..5f248dd --- /dev/null +++ b/node_modules/pushover-notifications/README.md @@ -0,0 +1,89 @@ +![Pushover](https://pushover.net/assets/pushover-header-0f47af8e08d8bef658a999a9e6584fcc.png) + +Send [pushover.net](http://pushover.net) notifications from Node.JS + +## Usage + +### Install + + npm install pushover-notifications + +### Pushover API values + +Any API paramaters, as found on https://pushover.net/api, can be passed in the object. For example, `retry` and `expire` can be added to the object being passed to `.send`! Here's an example with many different parameters. +```javascript +var msg = { + message: "This is a message", + title: "Well - this is fantastic", + sound: 'magic', + device: 'test_device', + priority: 2, + url: "http://pushover.net", + url_title: "Pushover Website" +}; +``` +## Examples + +### Sending a message +```javascript + +var push = require( 'pushover-notifications' ); + +var p = new push( { + user: process.env['PUSHOVER_USER'], + token: process.env['PUSHOVER_TOKEN'], + // onerror: function(error) {}, + // update_sounds: true // update the list of sounds every day - will + // prevent app from exiting. +}); + +var msg = { + // These values correspond to the parameters detailed on https://pushover.net/api + // 'message' is required. All other values are optional. + message: 'omg node test', // required + title: "Well - this is fantastic", + sound: 'magic', + device: 'devicename', + priority: 1 +}; + +p.send( msg, function( err, result ) { + if ( err ) { + throw err; + } + + console.log( result ); +}); +``` + +### Sending a message to multiple users +```javascript + +var users = [ + 'token1', + 'token2', + 'token3' +]; + +var msg = { + message: 'omg node test', + title: "Well - this is fantastic", + sound: 'magic' // optional + priority: 1 // optional, +}; + +for ( var i = 0, l = users.length; i < l; i++ ) { + + msg.user = users[i]; + // token can be overwritten as well. 
+ + p.send( msg, function( err, result ) { + if ( err ) { + throw err; + } + + console.log( result ); + }); +} + +``` diff --git a/node_modules/pushover-notifications/index.js b/node_modules/pushover-notifications/index.js new file mode 100644 index 0000000..1e919c0 --- /dev/null +++ b/node_modules/pushover-notifications/index.js @@ -0,0 +1 @@ +module.exports = require( './lib/pushover' ); diff --git a/node_modules/pushover-notifications/lib/pushover.js b/node_modules/pushover-notifications/lib/pushover.js new file mode 100644 index 0000000..b5430bb --- /dev/null +++ b/node_modules/pushover-notifications/lib/pushover.js @@ -0,0 +1,180 @@ +var https = require('https'), +url = require('url'), +qs = require('querystring'), +p_url = 'https://api.pushover.net/1/messages.json'; + +function setDefaults(o) { + var def = [ + 'device', + 'title', + 'url', + 'url_title', + 'priority', + 'timestamp', + 'sound' + ]; + + var i = 0; l = def.length; + for (; i < l; i++) { + if (!o[def[i]]) { + o[def[i]] = ''; + } + } + + return o; +} + +function Pushover(opts) { + var self = this; + this.token = opts.token; + this.user = opts.user; + this.httpOptions = opts.httpOptions; + this.sounds = { + "pushover":"Pushover (default)", + "bike":"Bike", + "bugle":"Bugle", + "cashregister":"Cash Register", + "classical":"Classical", + "cosmic":"Cosmic", + "falling":"Falling", + "gamelan":"Gamelan", + "incoming":"Incoming", + "intermission":"Intermission", + "magic":"Magic", + "mechanical":"Mechanical", + "pianobar":"Piano Bar", + "siren":"Siren", + "spacealarm":"Space Alarm", + "tugboat":"Tug Boat", + "alien":"Alien Alarm (long)", + "climb":"Climb (long)", + "persistent":"Persistent (long)", + "echo":"Pushover Echo (long)", + "updown":"Up Down (long)", + "none":"None (silent)" + }; + + if (opts.debug) { + this.debug = opts.debug; + } + + if (opts.onerror) { + this.onerror = opts.onerror; + } + + if (opts.update_sounds) { + self.updateSounds(); + setInterval(function() { + self.updateSounds(); + }, 86400000); + } +} + +Pushover.prototype.errors = function(d) { + if (typeof d === 'string') { + d = JSON.parse(d); + } + + if (d.errors) { + if (this.onerror) { + this.onerror.call(null, d.errors[0]); + } else { + throw new Error(d.errors[0]); + } + } +}; + +Pushover.prototype.updateSounds = function() { + var self = this, data = ''; + var surl = 'https://api.pushover.net/1/sounds.json?token=' + self.token; + var req = https.request(url.parse(surl) , function(res) { + res.on('end', function() { + var j = JSON.parse(data); + self.errors(data); + self.sounds = j.sounds; + }); + + res.on('data', function(chunk) { + data += chunk; + }); + + }); + + req.on('error', function(e) { + err = e; + }); + + req.write(''); + req.end(); +}; + +Pushover.prototype.send = function(obj, fn) { + var self = this; + var o = url.parse(p_url); + o.method = "POST"; + + obj = setDefaults(obj); + + if (! 
self.sounds[ obj.sound ]) { + obj.sound = 'pushover'; + } + + var req_string = { + token: self.token || obj.token, + user: self.user || obj.user + }; + + var p; + for (p in obj) { + req_string[ p ] = obj[p]; + } + + req_string = qs.stringify(req_string); + + o.headers = { + 'Content-Length': req_string.length + }; + + var httpOpts = self.httpOptions; + if (httpOpts) { + Object.keys(httpOpts).forEach(function(key) { + o[key] = httpOpts[key]; + }); + } + + var req = https.request(o, function(res) { + if (self.debug) { + console.log(res.statusCode); + } + var err; + var data = ''; + res.on('end', function() { + self.errors(data); + if (fn) { + fn.call(null, err, data); + } + }); + + res.on('data', function(chunk) { + data += chunk; + }); + }); + + req.on('error', function(err) { + if (fn) { + fn.call(null, err); + } + // In the tests the "end" event did not get emitted if "error" was emitted, + // but to be sure that the callback is not get called twice, null the callback function + fn = null; + }); + + + if (self.debug) { + console.log (req_string); + } + req.write(req_string); + req.end(); +}; + +exports = module.exports = Pushover; diff --git a/node_modules/pushover-notifications/package.json b/node_modules/pushover-notifications/package.json new file mode 100644 index 0000000..70f1f30 --- /dev/null +++ b/node_modules/pushover-notifications/package.json @@ -0,0 +1,28 @@ +{ + "author": { + "name": "Aaron Bieber", + "email": "aaron@qbit.io" + }, + "name": "pushover-notifications", + "description": "Pushover API for node.js", + "version": "0.2.2", + "homepage": "http://github.com/qbit/node-pushover", + "repository": { + "type": "git", + "url": "https://github.com/qbit/node-pushover.git" + }, + "dependencies": {}, + "devDependencies": {}, + "optionalDependencies": {}, + "engines": { + "node": "*" + }, + "readme": "![Pushover](https://pushover.net/assets/pushover-header-0f47af8e08d8bef658a999a9e6584fcc.png)\n\nSend [pushover.net](http://pushover.net) notifications from Node.JS\n\n## Usage\n\n### Install\n\n\tnpm install pushover-notifications\n\t\n### Pushover API values\n\nAny API paramaters, as found on https://pushover.net/api, can be passed in the object. For example, `retry` and `expire` can be added to the object being passed to `.send`! Here's an example with many different parameters.\n```javascript\nvar msg = {\n\tmessage: \"This is a message\",\n\ttitle: \"Well - this is fantastic\",\n\tsound: 'magic',\n\tdevice: 'test_device',\n\tpriority: 2,\n\turl: \"http://pushover.net\",\n\turl_title: \"Pushover Website\"\n};\n```\n## Examples\n\n### Sending a message\n```javascript\n\nvar push = require( 'pushover-notifications' );\n\nvar p = new push( {\n\tuser: process.env['PUSHOVER_USER'],\n\ttoken: process.env['PUSHOVER_TOKEN'],\n\t// onerror: function(error) {},\n\t// update_sounds: true // update the list of sounds every day - will\n\t// prevent app from exiting.\n});\n\nvar msg = {\n\t// These values correspond to the parameters detailed on https://pushover.net/api\n\t// 'message' is required. 
All other values are optional.\n\tmessage: 'omg node test',\t// required\n\ttitle: \"Well - this is fantastic\",\n\tsound: 'magic',\n\tdevice: 'devicename',\n\tpriority: 1\n};\n\np.send( msg, function( err, result ) {\n\tif ( err ) {\n\t\tthrow err;\n\t}\n\n\tconsole.log( result );\n});\n```\n\n### Sending a message to multiple users\n```javascript\n\nvar users = [\n 'token1',\n 'token2',\n 'token3'\n];\n\nvar msg = {\n message: 'omg node test',\n title: \"Well - this is fantastic\",\n sound: 'magic' // optional\n priority: 1 // optional,\n};\n\nfor ( var i = 0, l = users.length; i < l; i++ ) {\n\n msg.user = users[i];\n // token can be overwritten as well.\n\n p.send( msg, function( err, result ) {\n if ( err ) {\n throw err;\n }\n\n console.log( result );\n });\n}\n\n```\n", + "readmeFilename": "README.md", + "_id": "pushover-notifications@0.2.2", + "dist": { + "shasum": "b151e5729b7014d84dcb71b1a86c247c124eb277" + }, + "_from": "pushover-notifications@", + "_resolved": "https://registry.npmjs.org/pushover-notifications/-/pushover-notifications-0.2.2.tgz" +} diff --git a/node_modules/pushover-notifications/test/test-onerror.js b/node_modules/pushover-notifications/test/test-onerror.js new file mode 100644 index 0000000..3f4552f --- /dev/null +++ b/node_modules/pushover-notifications/test/test-onerror.js @@ -0,0 +1,25 @@ +var push = require( '../lib/pushover.js' ); + +var p = new push( { + user: process.env['PUSHOVER_USER'], + token: process.env['PUSHOVER_TOKEN'], + update_sounds: false, + debug: true, + onerror: function(err) { + console.log('ERROR!', err); + } +}); + +var msg = { + message: 'omg node test', + sound: 'magic', + title: "Well - this is fantastic", +}; + +// console.log( p ); + +p.send( msg, function( err, result ) { + console.log( 'error', err ); + console.log( 'result', result ); + // process.exit(0); +}); diff --git a/node_modules/pushover-notifications/test/test.js b/node_modules/pushover-notifications/test/test.js new file mode 100644 index 0000000..fbb170e --- /dev/null +++ b/node_modules/pushover-notifications/test/test.js @@ -0,0 +1,22 @@ +var push = require( '../lib/pushover.js' ); + +var p = new push( { + user: process.env['PUSHOVER_USER'], + token: process.env['PUSHOVER_TOKEN'], + update_sounds: false, + debug: true +}); + +var msg = { + message: 'omg node test', + sound: 'magic', + title: "Well - this is fantastic", +}; + +// console.log( p ); + +p.send( msg, function( err, result ) { + console.log( 'error', err ); + console.log( 'result', result ); + // process.exit(0); +}); diff --git a/node_modules/pushover-notifications/test/test_multi.js b/node_modules/pushover-notifications/test/test_multi.js new file mode 100644 index 0000000..00fd7c1 --- /dev/null +++ b/node_modules/pushover-notifications/test/test_multi.js @@ -0,0 +1,21 @@ +var push = require( '../lib/pushover.js' ); + +var p = new push( { + // user: process.env['PUSHOVER_USER'], + token: process.env['PUSHOVER_TOKEN'], + debug: true +}); + +var msg = { + message: 'omg node test', + title: "Well - this is fantastic", + user: process.env['PUSHOVER_USER'] +}; + +// console.log( p ); + +p.send( msg, function( err, result ) { + console.log( err ); + console.log( result ); + process.exit(0); +}); diff --git a/node_modules/strong-cluster-connect-store/.idea/encodings.xml b/node_modules/strong-cluster-connect-store/.idea/encodings.xml new file mode 100644 index 0000000..e206d70 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.idea/encodings.xml @@ -0,0 +1,5 @@ + + + + + diff --git 
a/node_modules/strong-cluster-connect-store/.idea/inspectionProfiles/Project_Default.xml b/node_modules/strong-cluster-connect-store/.idea/inspectionProfiles/Project_Default.xml new file mode 100644 index 0000000..e8d7401 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.idea/inspectionProfiles/Project_Default.xml @@ -0,0 +1,9 @@ + + + + \ No newline at end of file diff --git a/node_modules/strong-cluster-connect-store/.idea/inspectionProfiles/profiles_settings.xml b/node_modules/strong-cluster-connect-store/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 0000000..3b31283 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,7 @@ + + + + \ No newline at end of file diff --git a/node_modules/strong-cluster-connect-store/.idea/jsLibraryMappings.xml b/node_modules/strong-cluster-connect-store/.idea/jsLibraryMappings.xml new file mode 100644 index 0000000..b56b20e --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.idea/jsLibraryMappings.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/node_modules/strong-cluster-connect-store/.idea/jsLinters/jshint.xml b/node_modules/strong-cluster-connect-store/.idea/jsLinters/jshint.xml new file mode 100644 index 0000000..ac01307 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.idea/jsLinters/jshint.xml @@ -0,0 +1,62 @@ + + + + + + diff --git a/node_modules/strong-cluster-connect-store/.idea/libraries/Node_js_Dependencies_for_strong_cluster_connect_store.xml b/node_modules/strong-cluster-connect-store/.idea/libraries/Node_js_Dependencies_for_strong_cluster_connect_store.xml new file mode 100644 index 0000000..f4fa110 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.idea/libraries/Node_js_Dependencies_for_strong_cluster_connect_store.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/node_modules/strong-cluster-connect-store/.idea/misc.xml b/node_modules/strong-cluster-connect-store/.idea/misc.xml new file mode 100644 index 0000000..1162f43 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.idea/misc.xml @@ -0,0 +1,5 @@ + + + + + diff --git a/node_modules/strong-cluster-connect-store/.idea/modules.xml b/node_modules/strong-cluster-connect-store/.idea/modules.xml new file mode 100644 index 0000000..d7c2875 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.idea/modules.xml @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/node_modules/strong-cluster-connect-store/.idea/scopes/scope_settings.xml b/node_modules/strong-cluster-connect-store/.idea/scopes/scope_settings.xml new file mode 100644 index 0000000..922003b --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.idea/scopes/scope_settings.xml @@ -0,0 +1,5 @@ + + + + \ No newline at end of file diff --git a/node_modules/strong-cluster-connect-store/.idea/strong-cluster-connect-store.iml b/node_modules/strong-cluster-connect-store/.idea/strong-cluster-connect-store.iml new file mode 100644 index 0000000..96e1b40 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.idea/strong-cluster-connect-store.iml @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/node_modules/strong-cluster-connect-store/.idea/vcs.xml b/node_modules/strong-cluster-connect-store/.idea/vcs.xml new file mode 100644 index 0000000..c80f219 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.idea/vcs.xml @@ -0,0 +1,7 @@ + + + + + + + diff --git a/node_modules/strong-cluster-connect-store/.idea/workspace.xml 
b/node_modules/strong-cluster-connect-store/.idea/workspace.xml new file mode 100644 index 0000000..0bb2a6c --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.idea/workspace.xml @@ -0,0 +1,476 @@ + $PROJECT_DIR$/node_modules/mocha + $PROJECT_DIR$ + true + BDD + false + $PROJECT_DIR$ + true + BDD + $PROJECT_DIR$/test + false + 1373452074532 + 1373452074532 diff --git a/node_modules/strong-cluster-connect-store/.jshintrc b/node_modules/strong-cluster-connect-store/.jshintrc new file mode 100644 index 0000000..b58693d --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.jshintrc @@ -0,0 +1,5 @@ +{ + "camelcase": true + , "quotmark": "single" + , "eqnull": true +} diff --git a/node_modules/strong-cluster-connect-store/.npmignore b/node_modules/strong-cluster-connect-store/.npmignore new file mode 100644 index 0000000..3d6b441 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.npmignore @@ -0,0 +1,18 @@ +lib-cov +*.seed +*.log +*.csv +*.dat +*.out +*.pid +*.gz + +pids +logs +results + +npm-debug.log + +strong-cluster-connect-store-*.tgz +coverage.html +node_modules diff --git a/node_modules/strong-cluster-connect-store/.travis.yml b/node_modules/strong-cluster-connect-store/.travis.yml new file mode 100644 index 0000000..05d299e --- /dev/null +++ b/node_modules/strong-cluster-connect-store/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.10" + - "0.11" diff --git a/node_modules/strong-cluster-connect-store/LICENSE b/node_modules/strong-cluster-connect-store/LICENSE new file mode 100644 index 0000000..4f4028a --- /dev/null +++ b/node_modules/strong-cluster-connect-store/LICENSE @@ -0,0 +1,311 @@ +Copyright (c) 2013-2014 StrongLoop, Inc. + +strong-cluster-connect-store uses a 'dual license' model. Users may use strong-cluster-connect-store under +the terms of the MIT license, or under the StrongLoop License. The text of both +is included below. + +MIT license + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +StrongLoop License + +STRONGLOOP SUBSCRIPTION AGREEMENT +PLEASE READ THIS AGREEMENT CAREFULLY BEFORE YOU AGREE TO THESE TERMS. IF YOU +ARE ACTING ON BEHALF OF AN ENTITY, THEN YOU REPRESENT THAT YOU HAVE THE +AUTHORITY TO ENTER INTO THIS AGREEMENT ON BEHALF OF THAT ENTITY. IF YOU DO NOT +AGREE TO THESE TERMS, YOU SHOULD NOT AGREE TO THE TERMS OF THIS AGREEMENT OR +INSTALL OR USE THE SOFTWARE. +This StrongLoop Subscription Agreement ("Agreement") is made by and between +StrongLoop, Inc. ("StrongLoop") with its principal place of business at 107 S. +B St, Suite 220, San Mateo, CA 94401 and the person or entity entering into this +Agreement ("Customer"). The effective date ("Effective Date") of this Agreement +is the date Customer agrees to these terms or installs or uses the Software (as +defined below). This Agreement applies to Customer's use of the Software but it +shall be superseded by any signed agreement between you and StrongLoop +concerning the Software. +1. Subscriptions and Licenses. +1.1 Subscriptions. StrongLoop offers five different subscription levels to its +customers, each as more particularly described on StrongLoop's website located +at www.strongloop.com (the "StrongLoop Site"): (1) Free; (2) Developer; (3) +Professional; (4) Gold; and (5) Platinum. The actual subscription level +applicable to Customer (the "Subscription") will be specified in the purchase +order that Customer issues to StrongLoop. This Agreement applies to Customer +regardless of the level of the Subscription selected by Customer and whether or +not Customer upgrades or downgrades its Subscription. StrongLoop hereby agrees +to provide the services as described on the StrongLoop Site for each +Subscription level during the term for which Customer has purchased the +applicable Subscription, subject to Customer paying the fees applicable to the +Subscription level purchased, if any (the "Subscription Fees"). StrongLoop may +modify the services to be provided under any Subscription upon notice to +Customer. +1.2 License Grant. Subject to the terms and conditions of this Agreement, +StrongLoop grants to Customer, during the Subscription Term (as defined in +Section 7.1 (Term and Termination) of this Agreement, a limited, non-exclusive, +non-transferable right and license, to install and use the StrongLoop Suite +software (the "Software") and the documentation made available electronically as +part of the Software (the "Documentation"), either of which may be modified +during the Term (as defined in Section 7.1 below), solely for development, +production and commercial purposes so long as Customer is using the Software to +run only one process on a given operating system at a time. This Agreement, +including but not limited to the license and restrictions contained herein, +apply to Customer regardless of whether Customer accesses the Software via +download from the StrongLoop Site or through a third-party website or service, +even if Customer acquired the Software prior to agreeing to this Agreement. +1.3 License Restrictions. 
Customer shall not itself, or through any parent, +subsidiary, affiliate, agent or other third party: + 1.3.1 sell, lease, license, distribute, sublicense or otherwise transfer + in whole or in part, any Software or the Documentation to a third party; + or + 1.3.2 decompile, disassemble, translate, reverse engineer or otherwise + attempt to derive source code from the Software, in whole or in part, nor + shall Customer use any mechanical, electronic or other method to trace, + decompile, disassemble, or identify the source code of the Software or + encourage others to do so, except to the limited extent, if any, that + applicable law permits such acts notwithstanding any contractual + prohibitions, provided, however, before Customer exercises any rights that + Customer believes to be entitled to based on mandatory law, Customer shall + provide StrongLoop with thirty (30) days prior written notice and provide + all reasonably requested information to allow StrongLoop to assess + Customer's claim and, at StrongLoop's sole discretion, to provide + alternatives that reduce any adverse impact on StrongLoop's intellectual + property or other rights; or + 1.3.3 allow access or permit use of the Software by any users other than + Customer's employees or authorized third-party contractors who are + providing services to Customer and agree in writing to abide by the terms + of this Agreement, provided further that Customer shall be liable for any + failure by such employees and third-party contractors to comply with the + terms of this Agreement and no usage restrictions, if any, shall be + exceeded; or + 1.3.4 create, develop, license, install, use, or deploy any third party + software or services to circumvent or provide access, permissions or + rights which violate the license keys embedded within the Software; or + 1.3.5 modify or create derivative works based upon the Software or + Documentation; or disclose the results of any benchmark test of the + Software to any third party without StrongLoop's prior written approval; + or + 1.3.6 change any proprietary rights notices which appear in the Software + or Documentation; or + 1.3.7 use the Software as part of a time sharing or service bureau + purposes or in any other resale capacity. +1.4 Third-Party Software. The Software may include individual certain software +that is owned by third parties, including individual open source software +components (the "Third-Party Software"), each of which has its own copyright and +its own applicable license conditions. Such third-party software is licensed to +Customer under the terms of the applicable third-party licenses and/or copyright +notices that can be found in the LICENSES file, the Documentation or other +materials accompanying the Software, except that Sections 5 (Warranty +Disclaimer) and 6 (Limitation of Liability) also govern Customer's use of the +third-party software. Customer agrees to comply with the terms and conditions +of the relevant third-party software licenses. +2. Support Services. StrongLoop has no obligation to provide any support for +the Software other than the support services specifically described on the +StrongLoop Site for the Subscription level procured by Customer. However, +StrongLoop has endeavored to establish a community of users of the Software who +have provided their own feedback, hints and advice regarding their experiences +in using the Software. You can find that community and user feedback on the +StrongLoop Site. 
The use of any information, content or other materials from, +contained in or on the StrongLoop Site are subject to the StrongLoop website +terms of use located here http://www.strongloop.com/terms-of-service. +3. Confidentiality. For purposes of this Agreement, "Confidential Information" +means any and all information or proprietary materials (in every form and media) +not generally known in the relevant trade or industry and which has been or is +hereafter disclosed or made available by StrongLoop to Customer in connection +with the transactions contemplated under this Agreement, including (i) all trade +secrets, (ii) existing or contemplated Software, services, designs, technology, +processes, technical data, engineering, techniques, methodologies and concepts +and any related information, and (iii) information relating to business plans, +sales or marketing methods and customer lists or requirements. For a period of +five (5) years from the date of disclosure of the applicable Confidential +Information, Customer shall (i) hold the Confidential Information in trust and +confidence and avoid the disclosure or release thereof to any other person or +entity by using the same degree of care as it uses to avoid unauthorized use, +disclosure, or dissemination of its own Confidential Information of a similar +nature, but not less than reasonable care, and (ii) not use the Confidential +Information for any purpose whatsoever except as expressly contemplated under +this Agreement; provided that, to the extent the Confidential Information +constitutes a trade secret under law, Customer agrees to protect such +information for so long as it qualifies as a trade secret under applicable law. +Customer shall disclose the Confidential Information only to those of its +employees and contractors having a need to know such Confidential Information +and shall take all reasonable precautions to ensure that such employees and +contractors comply with the provisions of this Section. The obligations of +Customer under this Section shall not apply to information that Customer can +demonstrate (i) was in its possession at the time of disclosure and without +restriction as to confidentiality, (ii) at the time of disclosure is generally +available to the public or after disclosure becomes generally available to the +public through no breach of agreement or other wrongful act by Customer, (iii) +has been received from a third party without restriction on disclosure and +without breach of agreement by Customer, or (iv) is independently developed by +Customer without regard to the Confidential Information. In addition, Customer +may disclose Confidential Information as required to comply with binding orders +of governmental entities that have jurisdiction over it; provided that Customer +gives StrongLoop reasonable written notice to allow StrongLoop to seek a +protective order or other appropriate remedy, discloses only such Confidential +Information as is required by the governmental entity, and uses commercially +reasonable efforts to obtain confidential treatment for any Confidential +Information disclosed. Notwithstanding the above, Customer agrees that +StrongLoop, its employees and agents shall be free to use and employ their +general skills, know-how, and expertise, and to use, disclose, and employ any +generalized ideas, concepts, know-how, methods, techniques or skills gained or +learned during the Term or thereafter. +4. Ownership. 
StrongLoop shall retain all intellectual property and proprietary +rights in the Software, Documentation, and related works, including but not +limited to any derivative work of the foregoing and StrongLoop's licensors shall +retain all intellectual property and proprietary rights in any Third-Party +Software that may be provided with or as a part of the Software. Customer shall +do nothing inconsistent with StrongLoop's or its licensors' title to the +Software and the intellectual property rights embodied therein, including, but +not limited to, transferring, loaning, selling, assigning, pledging, or +otherwise disposing, encumbering, or suffering a lien or encumbrance upon or +against any interest in the Software. The Software (including any Third-Party +Software) contain copyrighted material, trade secrets and other proprietary +material of StrongLoop and/or its licensors. +5. Warranty Disclaimer. THE SOFTWARE (INCLUDING ANY THIRD-PARTY SOFTWARE) AND +DOCUMENTATION MADE AVAILABLE TO CUSTOMER ARE PROVIDED "AS-IS" AND STRONGLOOP, +ON BEHALF OF ITSELF AND ITS LICENSORS, EXPRESSLY DISCLAIMS ALL WARRANTIES OF ANY +KIND, EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, ANY IMPLIED WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, TITLE, +PERFORMANCE, AND ACCURACY AND ANY IMPLIED WARRANTIES ARISING FROM STATUTE, +COURSE OF DEALING, COURSE OF PERFORMANCE, OR USAGE OF TRADE. STRONGLOOP DOES +NOT WARRANT THAT THE OPERATION OF THE SOFTWARE WILL BE UNINTERRUPTED OR +ERROR-FREE, THAT DEFECTS IN THE SOFTWARE WILL BE CORRECTED OR THAT THE SOFTWARE +WILL PROVIDE OR ENSURE ANY PARTICULAR RESULTS OR OUTCOME. NO ORAL OR WRITTEN +INFORMATION OR ADVICE GIVEN BY STRONGLOOP OR ITS AUTHORIZED REPRESENTATIVES +SHALL CREATE A WARRANTY OR IN ANY WAY INCREASE THE SCOPE OF THIS WARRANTY. +STRONGLOOP IS NOT OBLIGATED TO PROVIDE CUSTOMER WITH UPGRADES TO THE SOFTWARE, +BUT MAY ELECT TO DO SO IN ITS SOLE DISCRETION. SOME JURISDICTIONS DO NOT ALLOW +THE EXCLUSION OF IMPLIED WARRANTIES, SO THE ABOVE EXCLUSION MAY NOT APPLY TO +CUSTOMER.WITHOUT LIMITING THE GENERALITY OF THE FOREGOING DISCLAIMER, THE +SOFTWARE AND DOCUMENTATION ARE NOT DESIGNED, MANUFACTURED OR INTENDED FOR USE IN +THE PLANNING, CONSTRUCTION, MAINTENANCE, CONTROL, OR DIRECT OPERATION OF NUCLEAR +FACILITIES, AIRCRAFT NAVIGATION, CONTROL OR COMMUNICATION SYSTEMS, WEAPONS +SYSTEMS, OR DIRECT LIFE SUPPORT SYSTEMS. +6. Limitation of Liability. + 6.1 Exclusion of Liability. IN NO EVENT WILL STRONGLOOP OR ITS LICENSORS + BE LIABLE UNDER THIS AGREEMENT FOR ANY INDIRECT, RELIANCE, PUNITIVE, + CONSEQUENTIAL, SPECIAL, EXEMPLARY, OR INCIDENTAL DAMAGES OF ANY KIND AND + HOWEVER CAUSED (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF + BUSINESS PROFITS, BUSINESS INTERRUPTION, LOSS OF BUSINESS INFORMATION AND + THE LIKE), EVEN IF STRONGLOOP HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + DAMAGES. CUSTOMER BEARS FULL RESPONSIBILITY FOR USE OF THE SOFTWARE AND + THE SUBSCRIPTION AND STRONGLOOP DOES NOT GUARANTEE THAT THE USE OF THE + SOFTWARE AND SUBSCRIPTION WILL ENSURE THAT CUSTOMER'S NETWORK WILL BE + AVAILABLE, SECURE, MONITORED OR PROTECTED AGAINST ANY DOWNTIME, DENIAL OF + SERVICE ATTACKS, SECUITY BREACHES, HACKERS AND THE LIKE. 
IN NO EVENT WILL + STRONGLOOP'S CUMULATIVE LIABILITY FOR ANY DAMAGES, LOSSES AND CAUSES OF + ACTION (WHETHER IN CONTRACT, TORT, INCLUDING NEGLIGENCE, OR OTHERWISE) + ARISING OUT OF OR RELATED TO THIS AGREEMENT EXCEED THE GREATER OF ONE + HUNDRED DOLLARS (US$100) OR THE TOTAL SUBSCRIPTION FEES PAID BY CUSTOMER + TO STRONGLOOP IN THE TWELVE (12) MONTHS PRECEDING THE DATE THE CLAIM + ARISES. + 6.2 Limitation of Damages. IN NO EVENT WILL STRONGLOOP'S LICENSORS HAVE + ANY LIABILITY FOR ANY CLAIM ARISING IN CONNECTION WITH THIS AGREEMENT. + THE PROVISIONS OF THIS SECTION 6 ALLOCATE RISKS UNDER THIS AGREEMENT + BETWEEN CUSTOMER, STRONGLOOP AND STRONGLOOP'S SUPPLIERS. THE FOREGOING + LIMITATIONS, EXCLUSIONS AND DISCLAIMERS APPLY TO THE MAXIMUM EXTENT + PERMITTED BY APPLICABLE LAW, EVEN IF ANY REMEDY FAILS IN ITS ESSENTIAL + PURPOSE. + 6.3 Failure of Essential Purpose. THE PARTIES AGREE THAT THESE + LIMITATIONS SHALL APPLY EVEN IF THIS AGREEMENT OR ANY LIMITED REMEDY + SPECIFIED HEREIN IS FOUND TO HAVE FAILED OF ITS ESSENTIAL PURPOSE. + 6.4 Allocation of Risk. The sections on limitation of liability and + disclaimer of warranties allocate the risks in the Agreement between the + parties. This allocation is an essential element of the basis of the + bargain between the parties. +7. Term and Termination. +7.1 This Agreement shall commence on the Effective Date and continue for so long +as Customer has a valid Subscription and is current on the payment of any +Subscription Fees required to be paid for that Subscription (the "Subscription +Term"). Either party may terminate this Agreement immediately upon written +notice to the other party, and the Subscription and licenses granted hereunder +automatically terminate upon the termination of this Agreement. This Agreement +will terminate immediately without notice from StrongLoop if Customer fails to +comply with or otherwise breaches any provision of this Agreement. +7.2 All Sections other than Section 1.1 (Subscriptions) and 1.2 (Licenses) shall +survive the expiration or termination of this Agreement. +8. Subscription Fees and Payments. StrongLoop, Customer agrees to pay +StrongLoop the Subscription Fees as described on the StrongLoop Site for the +Subscription purchased unless a different amount has been agreed to in a +separate agreement between Customer and StrongLoop. In addition, Customer shall +pay all sales, use, value added, withholding, excise taxes and other tax, duty, +custom and similar fees levied upon the delivery or use of the Software and the +Subscriptions described in this Agreement. Fees shall be invoiced in full upon +StrongLoop's acceptance of Customer's purchase order for the Subscription. All +invoices shall be paid in US dollars and are due upon receipt and shall be paid +within thirty (30) days. Payments shall be made without right of set-off or +chargeback. If Customer does not pay the invoices when due, StrongLoop may +charge interest at one percent (1%) per month or the highest rate permitted by +law, whichever is lower, on the unpaid balance from the original due date. If +Customer fails to pay fees in accordance with this Section, StrongLoop may +suspend fulfilling its obligations under this Agreement (including but not +limited to suspending the services under the Subscription) until payment is +received by StrongLoop. 
If any applicable law requires Customer to withhold +amounts from any payments to StrongLoop under this Agreement, (a) Customer shall +effect such withholding, remit such amounts to the appropriate taxing +authorities and promptly furnish StrongLoop with tax receipts evidencing the +payments of such amounts and (b) the sum payable by Customer upon which the +deduction or withholding is based shall be increased to the extent necessary to +ensure that, after such deduction or withholding, StrongLoop receives and +retains, free from liability for such deduction or withholding, a net amount +equal to the amount StrongLoop would have received and retained absent the +required deduction or withholding. +9. General. +9.1 Compliance with Laws. Customer shall abide by all local, state, federal and +international laws, rules, regulations and orders applying to Customer's use of +the Software, including, without limitation, the laws and regulations of the +United States that may restrict the export and re-export of certain commodities +and technical data of United States origin, including the Software. Customer +agrees that it will not export or re-export the Software without the appropriate +United States or foreign government licenses. +9.2 Entire Agreement. This Agreement constitutes the entire agreement between +the parties concerning the subject matter hereof. This Agreement supersedes all +prior or contemporaneous discussions, proposals and agreements between the +parties relating to the subject matter hereof. No amendment, modification or +waiver of any provision of this Agreement shall be effective unless in writing +and signed by both parties. Any additional or different terms on any purchase +orders issued by Customer to StrongLoop shall not be binding on either party, +are hereby rejected by StrongLoop and void. +9.3 Severability. If any provision of this Agreement is held to be invalid or +unenforceable, the remaining portions shall remain in full force and effect and +such provision shall be enforced to the maximum extent possible so as to effect +the intent of the parties and shall be reformed to the extent necessary to make +such provision valid and enforceable. +9.4 Waiver. No waiver of rights by either party may be implied from any actions +or failures to enforce rights under this Agreement. +9.5 Force Majeure. Neither party shall be liable to the other for any delay or +failure to perform due to causes beyond its reasonable control (excluding +payment of monies due). +9.6 No Third Party Beneficiaries. Unless otherwise specifically stated, the +terms of this Agreement are intended to be and are solely for the benefit of +StrongLoop and Customer and do not create any right in favor of any third party. +9.7 Governing Law and Jurisdiction. This Agreement shall be governed by the +laws of the State of California, without reference to the principles of +conflicts of law. The provisions of the Uniform Computerized Information +Transaction Act and United Nations Convention on Contracts for the International +Sale of Goods shall not apply to this Agreement. The parties shall attempt to +resolve any dispute related to this Agreement informally, initially through +their respective management, and then by non-binding mediation in San Francisco +County, California. Any litigation related to this Agreement shall be brought +in the state or federal courts located in San Francisco County, California, and +only in those courts and each party irrevocably waives any objections to such +venue. 
+9.8 Notices. All notices must be in writing and shall be effective three (3) +days after the date sent to the other party's headquarters, Attention Chief +Financial Officer. diff --git a/node_modules/strong-cluster-connect-store/README.md b/node_modules/strong-cluster-connect-store/README.md new file mode 100644 index 0000000..ba9ac6c --- /dev/null +++ b/node_modules/strong-cluster-connect-store/README.md @@ -0,0 +1,28 @@ +# Connect Session Store for Cluster + +[![Build Status](https://travis-ci.org/strongloop/strong-cluster-connect-store.png?branch=master)](https://travis-ci.org/strongloop/strong-cluster-connect-store) +[![NPM version](https://badge.fury.io/js/strong-cluster-connect-store.png)](http://badge.fury.io/js/strong-cluster-connect-store) + +## Overview + +Strong-cluster-connect-store is an implementation of connect session store +using node's native cluster messaging. It provides an easy way for using +sessions in connect/express based applications running in a node cluster. + +Features: + +- Supports both connect and express. +- No dependencies on external services. +- Module is shipped without connect, it will use *your* version of connect + or express. +- Covered by unit-tests. + +## Documentation + +For complete documentation, see [StrongLoop Documentation | Strong Cluster Connect Store](http://docs.strongloop.com/display/DOC/Strong+Cluster+Connect+Store). + +## Installation + +```sh +$ npm install strong-cluster-connect-store +``` diff --git a/node_modules/strong-cluster-connect-store/docs.json b/node_modules/strong-cluster-connect-store/docs.json new file mode 100644 index 0000000..7ff5deb --- /dev/null +++ b/node_modules/strong-cluster-connect-store/docs.json @@ -0,0 +1,7 @@ +{ + "content": [ + {"title": "Strong Cluster Connect Store API", "depth": 2}, + "lib/cluster-store.js" + ], + "codeSectionDepth": 3 +} diff --git a/node_modules/strong-cluster-connect-store/index.js b/node_modules/strong-cluster-connect-store/index.js new file mode 100644 index 0000000..a1dd953 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/index.js @@ -0,0 +1 @@ +module.exports = require('./lib/cluster-store.js'); diff --git a/node_modules/strong-cluster-connect-store/lib/cluster-store.js b/node_modules/strong-cluster-connect-store/lib/cluster-store.js new file mode 100644 index 0000000..261e7aa --- /dev/null +++ b/node_modules/strong-cluster-connect-store/lib/cluster-store.js @@ -0,0 +1,115 @@ +var inherits = require('util').inherits; +var cluster = require('cluster'); +var NativeStore = require('strong-store-cluster'); + +/** + * Documentation marker for explicit setup of the shared-state server + * in the master process. The initialization happens when this module + * is required, thus calling this function is entirely optional. + * @private + */ +function setup() { + // no-op +} + +/** + * Return the `ClusterStore` constructor that can be called to create + * a session Store to use with + * the [express-session](https://www.npmjs.org/package/express-session) + * middleware. 
+ * + * #### Example + * ``` + // express v3.x + var session = express.session; + var SessionStore = require('strong-cluster-connect-store')(session); + + // express v4.x + var session = require('express-session'); + var SessionStore = require('strong-cluster-connect-store')(session); + + // express v3.x (backwards compatibility) + var SessionStore = require('strong-cluster-connect-store')(express); + * ``` + * + * @param {Object} connectOrSession express session or connect/express itself + * @return {function} The ClusterStore constructor. + */ +module.exports = function(connectOrSession) { + + var session = connectOrSession.session || connectOrSession; + + /** + * Connect's Store. + * @private + */ + var Store = session.Store; + + var COLLECTION_NAME = 'strong-cluster-connect-session-store'; + + /** + * Initialize a ClusterStore object with the given `options`. + * This is an internal constructor called by express-session middleware, + * you should not need to call it directly. + * @param {Object} options Options for the ClusterStore object. + * @constructor + * @extends {session.Store} + */ + function ClusterStore(options) { + Store.call(this, options); + this._collection = NativeStore.collection(COLLECTION_NAME); + } + + inherits(ClusterStore, Store); + + /** + * Fetch a session by an id and receive the session in the callback. + * @param {String} sid A string id for the session. + * @end + * @callback {Function} fn + * @param {Error} err if present, indicates an error condition. + * @param value The data stored in the collection, could be any type. + * @end + */ + ClusterStore.prototype.get = function(sid, fn) { + this._collection.get(sid, fn); + }; + + /** + * Commit the given `session` object associated with the given `sid` to the + * session store. + * @param {String} sid A string id identifying the session. + * @param {Object} session The session object. + * @end + * @callback {Function} fn + * @param {Error} err If defined, indicates an error occured. + * @end + */ + ClusterStore.prototype.set = function(sid, session, fn) { + this._collection.set(sid, session, fn); + }; + + /** + * Destroy the session associated with the given `sid`. + * @param {String} sid A String with the id of the session. + * @end + * @callback {Function} fn + * @param {Error} err If defined, indicates an error occured. + * @end + * + */ + ClusterStore.prototype.destroy = function(sid, fn){ + this._collection.del(sid, fn); + }; + + + /** + * Same as `setup()` (see above). 
+ * @private + */ + ClusterStore.setup = setup; + + return ClusterStore; +}; + +module.exports.setup = setup; diff --git a/node_modules/strong-cluster-connect-store/package.json b/node_modules/strong-cluster-connect-store/package.json new file mode 100644 index 0000000..5885c47 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/package.json @@ -0,0 +1,55 @@ +{ + "name": "strong-cluster-connect-store", + "version": "1.0.0", + "description": "Implementation of connect session store using node's native cluster messaging", + "license": { + "name": "Dual MIT/StrongLoop", + "url": "https://github.com/strongloop/strong-cluster-connect-store/blob/master/LICENSE" + }, + "main": "index.js", + "scripts": { + "test": "mocha --reporter spec", + "lint": "./node_modules/.bin/jshint *.js test lib" + }, + "repository": { + "type": "git", + "url": "https://github.com/strongloop/strong-cluster-connect-store.git" + }, + "keywords": [ + "connect", + "express", + "cluster", + "session", + "store" + ], + "author": { + "name": "Miroslav Bajtos", + "email": "miroslav@strongloop.com" + }, + "peerDependencies": { + "strong-store-cluster": "~0.1.0" + }, + "devDependencies": { + "mocha": "~1.9.0", + "jshint": "~2.0.1", + "chai": "~1.7.2", + "cookie-parser": "^1.0.1", + "strong-store-cluster": "latest", + "request": "~2.22.0", + "async": "~0.2.9", + "express-session": "^1.0.2", + "express": "^4.1.1", + "body-parser": "^1.0.2" + }, + "engines": { + "node": ">=0.10.0" + }, + "readme": "# Connect Session Store for Cluster\n\n[![Build Status](https://travis-ci.org/strongloop/strong-cluster-connect-store.png?branch=master)](https://travis-ci.org/strongloop/strong-cluster-connect-store)\n[![NPM version](https://badge.fury.io/js/strong-cluster-connect-store.png)](http://badge.fury.io/js/strong-cluster-connect-store)\n\n## Overview\n\nStrong-cluster-connect-store is an implementation of connect session store\nusing node's native cluster messaging. 
It provides an easy way for using\nsessions in connect/express based applications running in a node cluster.\n\nFeatures:\n\n- Supports both connect and express.\n- No dependencies on external services.\n- Module is shipped without connect, it will use *your* version of connect\n or express.\n- Covered by unit-tests.\n \n## Documentation\n\nFor complete documentation, see [StrongLoop Documentation | Strong Cluster Connect Store](http://docs.strongloop.com/display/DOC/Strong+Cluster+Connect+Store).\n\n## Installation\n\n```sh\n$ npm install strong-cluster-connect-store\n```\n", + "readmeFilename": "README.md", + "_id": "strong-cluster-connect-store@1.0.0", + "dist": { + "shasum": "c1ee462f23b1dee4d6381fe986d83a227c32c62e" + }, + "_from": "strong-cluster-connect-store@", + "_resolved": "https://registry.npmjs.org/strong-cluster-connect-store/-/strong-cluster-connect-store-1.0.0.tgz" +} diff --git a/node_modules/strong-cluster-connect-store/test/cluster-store.js b/node_modules/strong-cluster-connect-store/test/cluster-store.js new file mode 100644 index 0000000..c8f6712 --- /dev/null +++ b/node_modules/strong-cluster-connect-store/test/cluster-store.js @@ -0,0 +1,155 @@ +var cluster = require('cluster'); +var http = require('http'); +var expect = require('chai').expect; +var express = require('express'); +var bodyParser = require('body-parser'); +var cookieParser = require('cookie-parser'); +var session = require('express-session'); +var request = require('request'); +var async = require('async'); +var ClusterStore = require('..')(session); + +var workerUrl; + +// verify we can call setup without connect in master and workers +require('..').setup(); + +if (cluster.isWorker) { + startConnectServer(); + return; +} + +describe('clustered connect server', function() { + before(setupWorkers); + after(stopWorkers); + + var KEY = 'a-key'; + var PAYLOAD = 'a-value'; + + // NOTE We assume that the cluster does a perfect round-robin + // distribution of requests among the workers + + it('shares sessions between workers', function(done) { + async.series( + [ + save, + load + ], + function(err, results) { + if (err) { + return done(err); + } + expect(results.pop().value).to.equal(PAYLOAD); + done(); + } + ); + }); + + it('destroys a session shared between workers', function(done) { + async.series( + [ + save, + destroy, + load + ], + function(err, results) { + if (err) { + return done(err); + } + expect(results.pop().value).to.equal(undefined); + done(); + } + ); + }); + + function save(next) { + sendCommand({ cmd: 'set', key: KEY, value: PAYLOAD }, next); + } + + function destroy(next) { + sendCommand({ cmd: 'del', key: KEY }, next); + } + + function load(next) { + sendCommand({ cmd: 'get', key: KEY }, next); + } +}); + +function sendCommand(command, cb) { + request( + { + url: workerUrl, + method: 'POST', + json: command + }, + function(err, res, body) { + if (err) { + return cb(err); + } + cb(null, body); + } + ); +} + +var WORKER_COUNT = 2; + +function getNumberOfWorkers() { + return Object.keys(cluster.workers).length; +} + +function setupWorkers(done) { + if (getNumberOfWorkers() > 0) { + var msg = 'Cannot setup workers: there are already other workers running.'; + return done(new Error(msg)); + } + + cluster.setupMaster({ exec: __filename }); + ClusterStore.setup(); + + var workersListening = 0; + cluster.on('listening', function(w, addr) { + if (!workerUrl) workerUrl = 'http://localhost:' + addr.port; + + workersListening++; + if (workersListening == WORKER_COUNT) { + done(); + } + }); + + for 
(var i = 0; i < WORKER_COUNT; i++) { + cluster.fork(); + } +} + +function stopWorkers(done) { + cluster.disconnect(done); +} + +function startConnectServer() { + var PORT = 0; // Let the OS pick any available port + var app = express() + .use(cookieParser()) + .use(session({ store: new ClusterStore(), secret: 'a-secret', key: 'sid' })) + .use(bodyParser.json()) + .use(requestHandler); + + var server = http.createServer(app).listen(PORT); + + function requestHandler(req, res) { + var result = {}; + switch (req.body.cmd) { + case 'set': + req.session[req.body.key] = req.body.value; + break; + case 'get': + result.value = req.session[req.body.key]; + break; + case 'del': + req.session.destroy(); + break; + } + + res.setHeader('Content-Type', 'text/json'); + res.end(JSON.stringify(result)); + } +} diff --git a/node_modules/strong-store-cluster/BUG.txt b/node_modules/strong-store-cluster/BUG.txt new file mode 100644 index 0000000..276a58e --- /dev/null +++ b/node_modules/strong-store-cluster/BUG.txt @@ -0,0 +1,2 @@ +A new Client() is created for each 'online' worker... but clients are never +released. diff --git a/node_modules/strong-store-cluster/LICENSE b/node_modules/strong-store-cluster/LICENSE new file mode 100644 index 0000000..2ce62f5 --- /dev/null +++ b/node_modules/strong-store-cluster/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2013 Strongloop, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/strong-store-cluster/Makefile b/node_modules/strong-store-cluster/Makefile new file mode 100644 index 0000000..6ee9d9a --- /dev/null +++ b/node_modules/strong-store-cluster/Makefile @@ -0,0 +1,19 @@ +# Makefile + +-include local.mk + +.PHONY: test default + +default: test + +test: + @npm test + +jenkins-build: jenkins-install jenkins-test + +jenkins-install: + npm install + +jenkins-test: + ./node_modules/.bin/mocha --timeout 5000 --slow 1000 --ui tdd --reporter xunit > xunit.xml + diff --git a/node_modules/strong-store-cluster/README.md b/node_modules/strong-store-cluster/README.md new file mode 100644 index 0000000..5916c03 --- /dev/null +++ b/node_modules/strong-store-cluster/README.md @@ -0,0 +1,189 @@ +# Strong Store Cluster + +Strong Store for Cluster provides a key/value collection that can be accesses by +all processes in a node cluster. 
+
+## Example
+
+```javascript
+// get the collection and give it a name
+var collection = require('strong-store-cluster').collection('test');
+
+// don't let keys expire, ever - the number represents seconds
+collection.configure({ expireKeys: 0 });
+
+collection.set('ThisIsMyKey', { a: 0, b: 'Hiya', c: { d: 99}}, function(err) {
+  if (err) {
+    console.error('There was an error');
+    return;
+  }
+
+  collection.get('ThisIsMyKey', function(err, obj) {
+    if (err) {
+      console.error('There was an error in collection.get.');
+      return;
+    }
+
+    console.log('The object: ', obj);
+  });
+});
+```
+
+## API documentation
+
+### store.collection(name)
+
+Returns a Collection object which lets you share data between node processes.
+
+### Class: Collection
+
+A `Collection` instance provides access to a shared key-value store
+shared by multiple node instances.
+
+How collections are named and stored is determined by the storage backend. The
+`strong-store-cluster` implementation stores collections in the master process
+(if you're using cluster), and accepts any arbitrary string as a collection
+name.
+
+A `Collection` object is also an `EventEmitter`.
+
+
+#### collection.configure([options])
+
+* `options` (`Object`) contains configuration options to be changed
+  * `expireKeys` (`Number`) seconds after which keys in this
+    collection are to be expired.
+
+Set configuration options for the collection.
+
+Currently only one configurable option is supported: `expireKeys`. When set
+to a nonzero value, keys will automatically expire after they've not been
+read or updated for some time. The timeout is specified in seconds. There's no
+guarantee that the key will be discarded after exactly that number of seconds
+has passed. However, keys will never be automatically deleted _sooner_ than what
+the `expireKeys` setting allows.
+
+It is perfectly legal to call the `configure` method from multiple node
+processes (e.g. both in a worker and in the master process). However, you
+should be careful to set the _same_ option values every time, otherwise the
+effect is undefined.
+
+
+#### collection.get(key, callback)
+
+* `key` (`String`) key to retrieve
+* `callback` (`Function`) called when the value has been retrieved
+
+Read the value associated with a particular key. The callback is called with
+two arguments, `(err, value)`. When the key wasn't found in the collection, it
+is automatically created and its `value` is set to `undefined`.
+
+
+#### collection.set(key, [value], [callback])
+
+* `key` (`String`) key to set or update
+* `value` (`object`) value to associate with the key
+* `callback` (`Function`) called when the value has been stored
+
+Set the value associated with `key`. The `value` must be either undefined or
+a value that can be serialized with JSON.stringify.
+
+When the `value` parameter is omitted or set to `undefined`, the key is
+deleted, so effectively it's the same as calling `collection.del(key)`.
+
+The `callback` function receives only one argument, `err`. When the
+callback is omitted, the master process does not send a confirmation
+after updating the key, and any errors are silently ignored.
+
+
+#### collection.del(key, [callback])
+
+* `key` (`String`) key to delete
+* `callback` (`Function`) called when the key has been deleted
+
+Delete a key from the collection.
+
+This operation is the equivalent of setting the key to `undefined`.
+
+The `callback` function receives only one argument, `err`. When the
+callback is omitted, the master process does not send a confirmation
+after deleting the key, and any errors are silently ignored.
+
+
+#### collection.acquire(key, callback)
+
+* `key` (`String`) key to acquire
+* `callback` (`Function`) called when the key has been locked
+
+Lock a key for exclusive read and write access.
+
+The `acquire` method waits until it can grab an exclusive lock on the
+specified key. When the lock is acquired, no other process can read, write or
+delete this particular key. When the lock is no longer needed, it should be
+relinquished with `keylock.release()`.
+
+Three parameters are passed to the `callback` function:
+`(err, keylock, value)`. The `keylock` argument receives a `KeyLock` class
+instance, which lets you read and manipulate the key's value as well as
+eventually release the lock. The `value` argument is set to the initial value
+associated with the key.
+
+
+#### Event: 'error'
+
+* `err` (`Error`)
+
+The error event is emitted whenever an unrecoverable error is encountered.
+
+### Class: KeyLock
+
+A `KeyLock` instance represents a key that has been locked. The `KeyLock`
+class implements methods that let you manipulate the key and release
+the lock.
+
+
+#### keylock.get()
+
+* Returns: (`Object`) value that's currently associated with the key
+
+This function returns the value that's currently associated with the locked
+key.
+
+Initially this is the same as the `value` argument that was passed to the
+`collection.acquire()` callback, but it does immediately reflect changes that
+are made with `keylock.set()` and `keylock.del()`.
+
+
+#### keylock.set([value])
+
+Updates the value associated with a locked key.
+
+The change isn't pushed back to the master process immediately; the change
+is committed when the lock is released again. The change however is reflected
+immediately in the return value from `keylock.get()`.
+
+After the lock has been released, the key can no longer be updated through the
+`KeyLock` instance. Any attempt to do so will make it throw.
+
+Setting the value to `undefined` marks the key for deletion, i.e. it's
+equivalent to `keylock.del()`.
+
+
+#### keylock.del()
+
+Mark a locked key for deletion. See `keylock.set()`.
+
+
+#### keylock.release([callback])
+
+Release the lock that protects a key. If the key was updated with
+`keylock.set()` or `keylock.del()`, these changes are committed.
+
+When a lock has been released, it is no longer possible to manipulate the
+key using `KeyLock` methods. Releasing the lock twice isn't allowed either.
+The `get()` method will still work but it won't reflect any value changes
+that were made after releasing.
+
+The `callback` function receives only one argument, `err`. When the
+callback is omitted, the master process does not send a confirmation
+after releasing the key, and any errors are silently ignored.
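To tie the locking API above together, here is a minimal sketch of the acquire/set/release cycle built only from the calls documented in this README; the collection name 'counters' and key 'hits' are illustrative placeholders, not names used anywhere in this module.

```javascript
// Minimal sketch of the documented locking workflow (illustrative names only).
var store = require('strong-store-cluster');
var counters = store.collection('counters'); // 'counters' is a placeholder name

// Expire idle keys after 30 seconds (see collection.configure above).
counters.configure({ expireKeys: 30 });

counters.acquire('hits', function(err, keylock, value) {
  if (err) {
    console.error('acquire failed:', err);
    return;
  }

  // While the lock is held, no other process can read, write or delete 'hits'.
  keylock.set((value || 0) + 1);    // staged locally until release()

  keylock.release(function(err) {   // commits the staged value
    if (err) console.error('release failed:', err);
  });
});
```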
diff --git a/node_modules/strong-store-cluster/docs.json b/node_modules/strong-store-cluster/docs.json new file mode 100644 index 0000000..c783e25 --- /dev/null +++ b/node_modules/strong-store-cluster/docs.json @@ -0,0 +1,6 @@ +{ + "title": "Strong Store for Cluster", + "content": [ + "README.md" + ] +} diff --git a/node_modules/strong-store-cluster/index.js b/node_modules/strong-store-cluster/index.js new file mode 100644 index 0000000..35781a4 --- /dev/null +++ b/node_modules/strong-store-cluster/index.js @@ -0,0 +1,18 @@ +var assert = require('assert'); +var cluster = require('cluster'); +var VERSION = require('./package.json').version; + +if(cluster._strongStoreCluster) { + assert( + cluster._strongStoreCluster.VERSION === VERSION, + 'Multiple versions of strong-strore-cluster are being initialized.\n' + + 'This version ' + VERSION + ' is incompatible with already initialized\n' + + 'version ' + cluster._strongStoreCluster.VERSION + '.\n' + ); + module.exports = cluster._strongStoreCluster; + return; +} + +module.exports = require('./lib/lib.js'); +module.exports.VERSION = VERSION; +cluster._strongStoreCluster = module.exports; diff --git a/node_modules/strong-store-cluster/lib/collection.js b/node_modules/strong-store-cluster/lib/collection.js new file mode 100644 index 0000000..ad13a11 --- /dev/null +++ b/node_modules/strong-store-cluster/lib/collection.js @@ -0,0 +1,22 @@ + +module.exports = collection; + + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +var cluster = require('cluster'); + +if (cluster.isMaster) + var Collection = require('./master/Collection.js'); +else + var Collection = require('./worker/Collection.js'); + + +var collections = {}; + +function collection(name) { + if (!hasOwnProperty.call(collections, name)) + return collections[name] = new Collection(name); + else + return collections[name]; +} diff --git a/node_modules/strong-store-cluster/lib/lib.js b/node_modules/strong-store-cluster/lib/lib.js new file mode 100644 index 0000000..99b93fd --- /dev/null +++ b/node_modules/strong-store-cluster/lib/lib.js @@ -0,0 +1,8 @@ + +var cluster = require('cluster'), + collection = require('./collection.js'); + +exports.collection = collection; + +if (cluster.isMaster) + require('./master/setup.js'); diff --git a/node_modules/strong-store-cluster/lib/master/Client.js b/node_modules/strong-store-cluster/lib/master/Client.js new file mode 100644 index 0000000..7609bd5 --- /dev/null +++ b/node_modules/strong-store-cluster/lib/master/Client.js @@ -0,0 +1,107 @@ + + +module.exports = Client; + + +var collection = require('../collection.js'); + + +function Client(worker) { + this._worker = worker; + this._id = worker.id; + this._locks = {}; + + this._onMessage = this.onMessage.bind(this); + + worker.on('message', this._onMessage); +} + +Client.prototype.onMessage = function(msg) { + if (msg.type === 'DSM_REQUEST') + this[msg.method](msg); +}; + +Client.prototype.get = function(msg) { + var self = this, + entry = collection(msg.collection)._entry(msg.key); + + entry.get(this._id, function(err, json) { + if (err) self._sendError(msg, err); + else self._sendReply(msg, { json: json }); + }); +}; + +Client.prototype.set = function(msg) { + var self = this, + entry = collection(msg.collection)._entry(msg.key); + + entry.set(msg.json, this._id, function(err) { + if (err) self._sendError(msg, err); + else self._sendReply(msg); + }); +}; + +Client.prototype.acquire = function(msg) { + var self = this, + entry = collection(msg.collection)._entry(msg.key); + + 
entry.acquire(this._id, function(err, json) { + if (err) self._sendError(msg, err); + else self._sendReply(msg, { json: json }); + }); +}; + +Client.prototype.release = function(msg) { + var self = this, + entry = collection(msg.collection)._entry(msg.key); + + entry.release(this._id, function(err, json) { + if (err) self._sendError(msg, err); + else self._sendReply(msg); + }); +}; + +Client.prototype.setRelease = function(msg) { + var self = this, + entry = collection(msg.collection)._entry(msg.key); + + entry.setRelease(msg.json, this._id, function(err) { + if (err) self._sendError(msg, err); + else self._sendReply(msg); + }); +}; + +Client.prototype.configure = function(msg) { + var coll = collection(msg.collection), + err = undefined; + + try { + coll._applyConfig(msg.config); + this._sendReply(msg); + } catch (err) { + this._sendError(msg, err); + } +}; + +// This function clobbers the data argument if specified! +Client.prototype._sendReply = function(msg, data) { + if (!msg.requestId) + return; + + data = data || {}; + data.type = 'DSM_REPLY'; + data.requestId = msg.requestId; + data.err = undefined; + this._worker.send(data); +}; + +Client.prototype._sendError = function(msg, err) { + if (!msg.requestId) + return; + + var data = {}; + data.type = 'DSM_REPLY'; + data.requestId = msg.requestId; + data.err = err; + this._worker.send(data); +}; diff --git a/node_modules/strong-store-cluster/lib/master/Collection.js b/node_modules/strong-store-cluster/lib/master/Collection.js new file mode 100644 index 0000000..7718f21 --- /dev/null +++ b/node_modules/strong-store-cluster/lib/master/Collection.js @@ -0,0 +1,153 @@ + +module.exports = Collection; + + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +var assert = require('assert'), + inherits = require('util').inherits, + EventEmitter = require('events').EventEmitter; + +var Entry = require('./Entry.js'), + KeyLock = require('./KeyLock.js'); + + +function Collection(name) { + this._name = name; + this._entries = {}; + this._count = 0; + this._expireKeys = null; + this._expireKeysTimer = null; + + this._sweep = this._sweep.bind(this); +} + +inherits(Collection, EventEmitter); + +Collection.prototype._entry = function(key) { + if (!hasOwnProperty.call(this._entries, key)) { + if (!this._count++ && this._expireKeys) + this._startExpireKeysTimer(); + + return this._entries[key] = new Entry(this, key); + } else { + return this._entries[key]; + } +}; + +Collection.prototype._remove = function(key) { + assert(hasOwnProperty.call(this._entries, key)); + delete this._entries[key]; + + if (!--this._count && this.expireKeys) + this._stopExpireKeysTimer(); +}; + +Collection.prototype.get = function(key, cb) { + this._entry(key).get(-1, function(err, json) { + if (err) + cb(err); + else if (!json) + cb(null, undefined); + else + cb(null, JSON.parse(json)); + }); +}; + +Collection.prototype.set = function(key, value, cb) { + cb = cb || noop; + this._entry(key).set(JSON.stringify(value), -1, cb); +}; + +Collection.prototype.del = function(key, cb) { + cb = cb || noop; + this._entry(key).set(undefined, -1, cb); +}; + +Collection.prototype.acquire = function(key, cb) { + var self = this, + entry = this._entry(key); + + entry.acquire(-1, function(err, json) { + var lock = new KeyLock(entry, json); + return cb(null, lock, lock.get()); + }); +}; + +Collection.prototype._applyConfig = function(config) { + config = config || {}; + + for (var key in config) { + if (!hasOwnProperty.call(config, key)) + continue; + + switch (key) { + case 'expireKeys': + 
if (config.expireKeys === this._expireKeys) + break; + + this._stopExpireKeysTimer(); + this._expireKeys = config.expireKeys; + this._startExpireKeysTimer(); + break; + + default: + throw new Error('Unspported configuration option: ' + key); + } + } +}; + +Collection.prototype._startExpireKeysTimer = function() { + assert(!this._expireKeysTimer); + + if (!this._expireKeys || !this._count) + return; + + var interval = Math.ceil(this._expireKeys * 1000 / 2); + + this._expireKeysTimer = setInterval(this._sweep, interval); + this._expireKeysTimer.unref(); +}; + +Collection.prototype._stopExpireKeysTimer = function() { + if (!this._expireKeysTimer) + return; + + clearInterval(this._expireKeysTimer); + this._expireKeysTimer = null; +}; + +Collection.prototype._sweep = function() { + var entries = this._entries, + key; + + for (key in entries) { + if (!hasOwnProperty.call(entries, key)) + continue; + + if (entries[key].age(1) > 2) + this._remove(key); + } + + if (!this._count) + this._stopExpireKeysTimer(); +}; + +Collection.prototype.configure = function(config) { + var self = this; + + try { + this._applyConfig(config); + + } catch (err) { + process.nextTick(function() { + self.emit('error', err); + }); + } + + return this; +}; + + +function noop() { +} diff --git a/node_modules/strong-store-cluster/lib/master/Entry.js b/node_modules/strong-store-cluster/lib/master/Entry.js new file mode 100644 index 0000000..1a26423 --- /dev/null +++ b/node_modules/strong-store-cluster/lib/master/Entry.js @@ -0,0 +1,103 @@ + +module.exports = Entry; + + +var assert = require('assert'); + + +function Entry(collection, key) { + this._collection = collection; + this._key = key; + this._value = undefined; + this._queue = []; + this._age = 0; +} + +Entry.prototype.get = function(requestor, cb) { + var self = this; + + if (!this._queue.length) { + var value = self._value; + self._age = 0; + + process.nextTick(function() { + cb(null, value); + }); + + } else { + this.acquire(requestor, function(err) { + self.release(requestor, noop); + cb(null, self._value); + }); + } +}; + +Entry.prototype.set = function(newValue, requestor, cb) { + var self = this; + + if (!this._queue.length) { + this._value = newValue; + this._age = 0; + + if (newValue === undefined) + this._collection._remove(this._key); + + process.nextTick(cb); + + } else { + this.acquire(requestor, function() { + self._value = newValue; + + if (newValue !== undefined) + self.release(requestor, cb); + else { + self._collection._remove(self._key); + cb(); + } + }); + } +}; + +Entry.prototype.acquire = function(requestor, cb) { + var self = this; + + this._queue.push([cb, requestor]); + + if (this._queue.length === 1) { + process.nextTick(function() { + cb(null, self._value); + }); + } +}; + +Entry.prototype.release = function(requestor, cb) { + var self = this, + queue = this._queue; + + setImmediate(function() { + assert.strictEqual(requestor, queue.shift()[1]); + + self._age = 0; + cb(); + + if (queue.length) + queue[0][0](null, self._value); + }); +}; + +Entry.prototype.setRelease = function(newValue, requestor, cb) { + this._value = newValue; + this.release(requestor, cb); +}; + + +Entry.prototype.age = function(d) { + if (this._queue.length) + return this._age = 0; + else + return this._age += (d || 0); +}; + + +function noop() { +} diff --git a/node_modules/strong-store-cluster/lib/master/KeyLock.js b/node_modules/strong-store-cluster/lib/master/KeyLock.js new file mode 100644 index 0000000..79cfb15 --- /dev/null +++ 
b/node_modules/strong-store-cluster/lib/master/KeyLock.js @@ -0,0 +1,51 @@ + + +module.exports = KeyLock; + + +function KeyLock(entry, json) { + this._entry = entry; + this._json = json; + this._updated = false; + this._released = false; +} + +KeyLock.prototype.get = function() { + if (!this._json) + return undefined; + else + return JSON.parse(this._json); +}; + +KeyLock.prototype.set = function(newValue) { + if (this._released) + throw new Error("Can't set after releasing a lock."); + + this._json = JSON.stringify(newValue); + this._updated = true; +}; + +KeyLock.prototype.del = function() { + if (this._released) + throw new Error("Can't delete after releasing a lock."); + + this._json = undefined; + this._updated = true; +}; + +KeyLock.prototype.release = function(cb) { + if (this._released) + throw new Error('KeyLock has already been released.'); + + cb = cb || noop; + this._released = true; + + if (!this._updated) + this._entry.release(-1, cb); + else + this._entry.setRelease(this._json, -1, cb); +}; + + +function noop() { +} diff --git a/node_modules/strong-store-cluster/lib/master/setup.js b/node_modules/strong-store-cluster/lib/master/setup.js new file mode 100644 index 0000000..ed40a54 --- /dev/null +++ b/node_modules/strong-store-cluster/lib/master/setup.js @@ -0,0 +1,10 @@ + +var cluster = require('cluster'), + Client = require('./Client.js'); + +for (var i = 0; i < cluster.workers.length; i++) + new Client(cluster.workers[i]); + +cluster.on('online', function(worker) { + new Client(worker); +}); diff --git a/node_modules/strong-store-cluster/lib/worker/Collection.js b/node_modules/strong-store-cluster/lib/worker/Collection.js new file mode 100644 index 0000000..66d960b --- /dev/null +++ b/node_modules/strong-store-cluster/lib/worker/Collection.js @@ -0,0 +1,68 @@ + +module.exports = Collection; + + +var inherits = require('util').inherits, + EventEmitter = require('events').EventEmitter, + KeyLock = require('./KeyLock.js'), + request = require('./request.js'); + + +function Collection(name) { + this._name = name; +} + +inherits(Collection, EventEmitter); + +Collection.prototype.get = function(key, cb) { + this._request('get', key, null, function(err, msg) { + if (err) + return cb(err); + else if (!msg.json) + return cb(null, undefined); + else + return cb(null, JSON.parse(msg.json)); + }); +}; + +Collection.prototype.set = function(key, value, cb) { + var data = { json: JSON.stringify(value) }; + this._request('set', key, data, cb && function(err, msg) { + return cb(err); + }); +}; + +Collection.prototype.del = function(key, cb) { + return this._request('set', key, null, cb && function(err, msg) { + return cb(err); + }); +}; + +Collection.prototype.acquire = function(key, cb) { + var self = this; + + this._request('acquire', key, null, function(err, msg) { + if (err) + return cb(err); + + var json = msg.json; + var lock = new KeyLock(self, key, json); + cb(null, lock, lock.get()); + }); +}; + +Collection.prototype.configure = function(config) { + var self = this; + + this._request('configure', null, { config: config }, function(err, msg) { + if (err) + self.emit('error', err); + }); + + return this; +}; + +// This function clobbers `data` if specified +Collection.prototype._request = function(method, key, data, cb) { + request(method, this._name, key, data, cb); +}; diff --git a/node_modules/strong-store-cluster/lib/worker/KeyLock.js b/node_modules/strong-store-cluster/lib/worker/KeyLock.js new file mode 100644 index 0000000..7372948 --- /dev/null +++ 
b/node_modules/strong-store-cluster/lib/worker/KeyLock.js @@ -0,0 +1,56 @@ + +module.exports = KeyLock; + + +var request = require('./request.js'); + + +function KeyLock(collection, key, json) { + this._collection = collection; + this._key = key; + this._json = json; + this._updated = false; + this._released = false; +} + +KeyLock.prototype.get = function() { + if (!this._json) + return undefined; + else + return JSON.parse(this._json); +}; + +KeyLock.prototype.set = function(newValue) { + if (this._released) + throw new Error("Can't set after releasing a lock."); + + this._json = JSON.stringify(newValue); + this._updated = true; +}; + +KeyLock.prototype.del = function() { + if (this._released) + throw new Error("Can't delete after releasing a lock."); + + this._json = undefined; + this._updated = true; +}; + +KeyLock.prototype.release = function(cb) { + if (this._released) + throw new Error('KeyLock has already been released.'); + + this._released = true; + + if (!this._updated) + this._collection._request('release', this._key, null, cb && afterRelease); + else + this._collection._request('setRelease', + this._key, + { json: this._json }, + cb && afterRelease); + + function afterRelease(err, msg) { + return cb(err); + } +}; diff --git a/node_modules/strong-store-cluster/lib/worker/request.js b/node_modules/strong-store-cluster/lib/worker/request.js new file mode 100644 index 0000000..647cc83 --- /dev/null +++ b/node_modules/strong-store-cluster/lib/worker/request.js @@ -0,0 +1,51 @@ + + +module.exports = request; +process.on('message', onMessage); + + +var requestIdCounter = 0; +var requestCallbacks = {}; + + +// This function clobbers `data` if specified +function request(method, collection, key, data, cb) { + data = data || {}; + + data.type = 'DSM_REQUEST'; + data.method = method; + data.collection = collection; + data.key = key; + + if (cb) { + var requestId = getRequestId(); + requestCallbacks[requestId] = cb; + data.requestId = requestId; + } + + process.send(data); +} + + +function onMessage(msg) { + if (msg.type !== 'DSM_REPLY') + return; + + var requestId = msg.requestId; + var cb = requestCallbacks[requestId]; + delete requestCallbacks[requestId]; + + if (msg.err) { + var err = new Error('Master error: ' + msg.err); + return cb(err); + } + + cb(null, msg); +} + + +function getRequestId() { + return ++requestIdCounter; +} + + diff --git a/node_modules/strong-store-cluster/package.json b/node_modules/strong-store-cluster/package.json new file mode 100644 index 0000000..0401b3f --- /dev/null +++ b/node_modules/strong-store-cluster/package.json @@ -0,0 +1,44 @@ +{ + "name": "strong-store-cluster", + "version": "0.1.3", + "description": "In-memory key/value store for the node's native cluster.", + "readmeFilename": "README.md", + "license": "MIT", + "keywords": [ + "cluster", + "store" + ], + "author": { + "name": "Bert Belder", + "email": "bert@strongloop.com" + }, + "repository": { + "type": "git", + "url": "https://github.com/strongloop/strong-store-cluster.git" + }, + "bugs": { + "url": "https://github.com/strongloop/strong-store-cluster/issues" + }, + "main": "index.js", + "scripts": { + "blanket": { + "pattern": "//^((?!(node_modules|test)).)*$/" + }, + "coverage": "mocha -r blanket -R html-cov > coverage_strong-store-cluster.html", + "test": "mocha --reporter spec --timeout 5000 --slow 1000 --ui tdd" + }, + "devDependencies": { + "blanket": "latest", + "mocha": "~1.9.0" + }, + "engines": { + "node": ">=0.10" + }, + "readme": "# Strong Store Cluster\n\nStrong Store for Cluster 
provides a key/value collection that can be accesses by\nall processes in a node cluster.\n\n## Example\n\n```javascript\n// get the collection and give it a name\nvar collection = require('strong-store-cluster').collection('test');\n\n// don't let keys expire, ever - the number represents seconds\ncollection.configure({ expireKeys: 0 });\n\ncollection.set('ThisIsMyKey', { a: 0, b: 'Hiya', c: { d: 99}}, function(err) {\n if (err) {\n console.error('There was an error');\n return;\n }\n\n collection.get('ThisIsMyKey', function(err, obj) {\n if (err) {\n console.error('There was an error in collection.get.');\n return;\n }\n\n console.log('The object: ',obj);\n });\n});\n```\n\n## API documentation\n\n### store.collection(name)\n\nReturns a Collection object which lets you share data between node processes.\n\n### Class: Collection\n\nA `Collection` instance provides access to a shared key-value store\nshared by multiple node instances.\n\nHow collections are named and stored is determined by the storage backend. The\n`strong-store-cluster` implementation stores collections in the master process\n(if you're using cluster), and accepts any arbitrary string as a collection\nname.\n\nA `Collection` object is also an `EventEmitter`.\n\n\n#### collection.configure([options])\n\n* `options` (`Object`) contains configurations options to be changed\n * `expireKeys` (`Number`) seconds after which keys in this\n collection are to be expired.\n\nSet configuration options for the collection.\n\nCurrently only one configurable option is supported: `expireKeys`. When set\nto a nonzero value, keys will automatically expire after they've not been\nread or updated for some time. The timeout is specified in seconds. There's no\nguarantee that the key will be discared after exactly that number of seconds\nhas passed. However keys will never be automatically deleted _sooner_ than what\nthe `expireKeys` setting allows.\n\nIt is perfectly legal to call the `configure` method from multiple node\nprocesses (e.g. both in a worker and in the master process). However you\nshould be careful to set the _same_ option values every time, otherwise the\neffect is undefined.\n\n\n#### collection.get(key, callback)\n\n* `key` (`String`) key to retrieve\n* `callback` (`Function`) called when the value has been retrieved\n\nRead the value associated with a particular key. The callback is called with\ntwo arguments, `(err, value)`. When the key wasn't found in the collection, it\nis automatically created and it's `value` is set to `undefined`.\n\n\n#### collection.set(key, [value], [callback])\n\n* `key` (`String`) key to set or update\n* `value` (`object`) value to associate with the key\n* `callback` (`Function`) called when the value has been retrieved\n\nSet the value associated with `key`. The `value` must be either undefined or\na value that can be serialized with JSON.stringify.\n\nWhen the `value` parameter is omitted or set to `undefined`, the key is\ndeleted, so effectively it's the same as calling `collection.del(key)`.\n\nThe `callback` function receives only one argument, `err`. 
When the\ncallback is omitted, the master process does not send a confirmation\nafter updating the key, and any errors are silently ignored.\n\n\n#### collection.del(key, [callback])\n\n* `key` (`String`) key to delete\n* `callback` (`Function`) called when the value has been retrieved\n\nDelete a key from the collection.\n\nThis operation is the equivalent of setting the key to `undefined`.\n\nThe `callback` function receives only one argument, `err`. When the\ncallback is omitted, the master process does not send a confirmation\nafter deleting the key, and any errors are silently ignored.\n\n\n#### collection.acquire(key, callback)\n\n* `key` (`String`) key to delete\n* `callback` (`Function`) called when the key has been locked\n\nLock a key for exclusive read and write access.\n\nThe `acquire` methods waits until it can grab an exclusive lock on the\nspecified key. When the lock is acquired, no other process can read, write or\ndelete this particular key. When the lock is no longer needed, it should be\nrelinquished with `keylock.release()`.\n\nThree parameters are passed to the `callback` function:\n`(err, keylock, value)`. The `keylock` argument receives a `KeyLock` class\ninstance, which lets you read and manipulate the key's value as well as\neventually release the lock. The `value` argument is set to the initial value\nassociated with the key.\n\n\n#### Event: 'error'\n\n* `err` (`Error`)\n\nThe error event is emitted whenever an unrecoverable error is encountered.\n\n### Class: KeyLock\n\nA `KeyLock` instance represents a key that has been locked. The `KeyLock`\nclass implements methods that lets you manipulate the key and release\nthe lock.\n\n\n#### keylock.get()\n\n* Returns: (`Object`) value that's currently associated with the key\n\nThis function returns the value that's currently associated with the locked\nkey.\n\nInitially this is the same as the `value` argument that was passed to the\n`collection.acquire()` callback, but it does immediately reflect changes that\nare made with `keylock.set()` and `keylock.del()`.\n\n\n#### keylock.set([value])\n\nUpdates the value associated with a locked key.\n\nThe change isn't pushed back to the master process immediately; the change\nis committed when the lock is released again. The change however is reflected\nimmediately in the return value from `keylock.get()`.\n\nAfter the lock has been released, the key can no longer be updated through the\n`KeyLock` instance. Any attempt to do so will make it throw.\n\nSetting the value to `undefined` marks the key for deletion, e.g. it's\nequivalent to `keylock.del()`.\n\n\n#### keylock.del()\n\nMark a locked key for deletion. See `keylock.set()`.\n\n\n#### keylock.release([callback])\n\nRelease the lock that protects a key. If the key was updated with\n`keylock.set()` or `keylock.del()`, these changes are committed.\n\nWhen a lock has been released, it is no longer possible to manipulate the\nkey using `KeyLock` methods. Releasing the lock twice isn't allowed either.\nThe `get()` method will still work but it won't reflect any value changes\nthat were made after releasing.\n\nThe `callback` function receives only one argument, `err`. 
When the\ncallback is omitted, the master process does not send a confirmation\nafter releasing the key, and any errors are silently ignored.\n", + "_id": "strong-store-cluster@0.1.3", + "dist": { + "shasum": "aaab17a025b6b8809c0c1f7a796e49254cd3386c" + }, + "_from": "strong-store-cluster@~0.1.0", + "_resolved": "https://registry.npmjs.org/strong-store-cluster/-/strong-store-cluster-0.1.3.tgz" +} diff --git a/node_modules/strong-store-cluster/test/concurrent-inc.js b/node_modules/strong-store-cluster/test/concurrent-inc.js new file mode 100644 index 0000000..e134da1 --- /dev/null +++ b/node_modules/strong-store-cluster/test/concurrent-inc.js @@ -0,0 +1,17 @@ + +var helper = require('./helper/cluster-helper'); + +suite('concurrent increment', function() { + test('master only', function(cb) { + helper.run('do-concurrent-inc', true, 0, 4, cb); + }); + + test('four workers', function(cb) { + helper.run('do-concurrent-inc', false, 4, 4, cb); + }); + + test('master and four workers', function(cb) { + helper.run('do-concurrent-inc', true, 4, 4, cb); + }); +}); + diff --git a/node_modules/strong-store-cluster/test/get-set-del.js b/node_modules/strong-store-cluster/test/get-set-del.js new file mode 100644 index 0000000..4d8b382 --- /dev/null +++ b/node_modules/strong-store-cluster/test/get-set-del.js @@ -0,0 +1,12 @@ + +var helper = require('./helper/cluster-helper'); + +suite('get-set-del', function() { + test('master', function(cb) { + helper.run('do-get-set-del', true, 0, 1, cb); + }); + + test('worker', function(cb) { + helper.run('do-get-set-del', false, 1, 1, cb); + }); +}); diff --git a/node_modules/strong-store-cluster/test/helper/cluster-helper.js b/node_modules/strong-store-cluster/test/helper/cluster-helper.js new file mode 100644 index 0000000..a9eabc3 --- /dev/null +++ b/node_modules/strong-store-cluster/test/helper/cluster-helper.js @@ -0,0 +1,83 @@ + + +var assert = require('assert'), + cluster = require('cluster'); + + +if (!process.env.CLUSTER_TEST) { + // We're require()'d by the test harness. Export a function that start + // the test. + + exports.run = function(filename, inMaster, workers, concurrency, cb) { + assert(filename); + assert(inMaster || workers); + assert(concurrency > 0); + + var env = {}; + for (var key in process.env) + env[key] = process.env[key]; + + env.CLUSTER_TEST = require.resolve('./' + filename); + env.CLUSTER_TEST_IN_MASTER = inMaster; + env.CLUSTER_TEST_WORKERS = workers; + env.CLUSTER_TEST_CONCURRENCY = concurrency; + + var cp = require('child_process').fork(module.filename, + {env: env, stdio: 'inherit'}); + + cp.on('exit', function(exitCode, termSig) { + assert(exitCode === 0); + assert(!termSig); + + cb(); + }); + }; + +} else { + // We're being spawned as a standalone process. Execute the test and/or spawn + // cluster workers that do. + + var filename = process.env.CLUSTER_TEST, + inMaster = !!+process.env.CLUSTER_TEST_IN_MASTER, + workers = ~~ + process.env.CLUSTER_TEST_WORKERS, + concurrency = ~~ + process.env.CLUSTER_TEST_CONCURRENCY; + + var test = require(filename); + + // Both the master and the worker have .setup() always called once. + test.setup && test.setup(); + + var waiting = 0; + + // If we're the master process, spawn a number of workers. 
+ if (cluster.isMaster && workers) { + for (var i = 0; i < workers; i++) + var worker = cluster.fork(); + + waiting += workers; + + cluster.on('exit', function(worker, exitCode, termSig) { + assert(exitCode === 0); + assert(!termSig); + + done(); + }); + } + + // If we're either a worker, or the master is supposed to run the tests, + // run the test cases. + if (cluster.isWorker || inMaster) { + waiting += concurrency; + + for (var i = 0; i < concurrency; i++) + test.run(done); + } +} + + +function done() { + assert(--waiting >= 0); + if (waiting === 0) + return test.teardown && test.teardown(); +} + diff --git a/node_modules/strong-store-cluster/test/helper/do-concurrent-inc.js b/node_modules/strong-store-cluster/test/helper/do-concurrent-inc.js new file mode 100644 index 0000000..b7e6640 --- /dev/null +++ b/node_modules/strong-store-cluster/test/helper/do-concurrent-inc.js @@ -0,0 +1,51 @@ + +exports.run = run; +exports.teardown = teardown; + + +var ROUNDS = 100; +assert = require('assert'), +cluster = require('cluster'), +store = require('../..'); + + +function run(cb) { + var left = ROUNDS, + coll = store.collection('counter'); + + increment(); + + function increment() { + coll.acquire('counter', function(err, lock, val) { + assert(!err); + + if (!val) + val = 1; + else + val++; + + lock.set(val); + lock.release(); + + if (--left > 0) + increment(); + else + cb(); + }); + } +} + + +function teardown() { + if (cluster.isWorker) + process._channel.unref(); + + if (cluster.isMaster) { + store.collection('counter').get('counter', function(err, value) { + assert(value % ROUNDS === 0); + assert(value >= ROUNDS); + }); + } +} + + diff --git a/node_modules/strong-store-cluster/test/helper/do-get-set-del.js b/node_modules/strong-store-cluster/test/helper/do-get-set-del.js new file mode 100644 index 0000000..8e0cfa8 --- /dev/null +++ b/node_modules/strong-store-cluster/test/helper/do-get-set-del.js @@ -0,0 +1,60 @@ + +exports.run = run; +exports.teardown = teardown; + + +var assert = require('assert'), + cluster = require('cluster'), + store = require('../..'); + + +function run(cb) { + var testsRun = 0; + + testWith('test1', 'key1', 'zulis', onDone); + testWith('test2', 'quux', 'stoll', onDone); + testWith('test2', 'key1', 'urals', onDone); + testWith('test2', 'key2', 'quipp', onDone); + + function onDone() { + if (++testsRun === 4) + cb(); + } +} + + +function testWith(collectionName, key, testValue, cb) { + var coll = store.collection(collectionName); + + coll.get(key, function(err, value) { + assert(!err); + assert(value === undefined); + + coll.set(key, testValue, function(err) { + assert(!err); + + coll.get(key, function(err, value) { + assert(!err); + assert(value === testValue); + + coll.del(key, function(err) { + assert(!err); + + coll.get(key, function(err, value) { + assert(!err); + assert(value === undefined); + + cb(); + }); + }); + }); + }); + }); +} + + + +function teardown() { + if (cluster.isWorker) + process._channel.unref(); +} diff --git a/node_modules/strong-store-cluster/test/helper/do-lock-get-set-del.js b/node_modules/strong-store-cluster/test/helper/do-lock-get-set-del.js new file mode 100644 index 0000000..fe5cb3b --- /dev/null +++ b/node_modules/strong-store-cluster/test/helper/do-lock-get-set-del.js @@ -0,0 +1,80 @@ + +exports.run = run; +exports.teardown = teardown; + + +var assert = require('assert'), + cluster = require('cluster'), + store = require('../..'); + + +function run(cb) { + var testsRun = 0; + + testWith('test1', 'key1', {foo: 'zulis'}, onDone); + 
testWith('test2', 'quux', ['stoll'], onDone); + testWith('test2', 'key1', 'urals', onDone); + testWith('test2', 'key2', 42, onDone); + + function onDone() { + if (++testsRun === 4) + cb(); + } +} + + +function testWith(collectionName, key, testValue, cb) { + var coll = store.collection(collectionName); + + coll.set(key, testValue, function(err, value) { + assert(!err); + + coll.acquire(key, function(err, lock, value) { + assert(!err); + + assert.deepEqual(testValue, value); + assert.deepEqual(testValue, lock.get()); + + // Non-primitive values should be deep-cloned. + if (typeof testValue === 'object') { + assert(testValue !== value); + assert(testValue !== lock.get()); + } + + lock.set('other'); + assert('other' === lock.get()); + + lock.release(function(err) { + assert(!err); + }); + + coll.acquire(key, function(err, lock, value) { + assert(!err); + + assert('other' === value); + assert('other' === lock.get()); + + lock.del(); + assert(undefined === lock.get()); + + lock.release(function(err) { + assert(!err); + }); + + coll.get(key, function(err, value) { + assert(!err); + assert(undefined === value); + + // That was it! + cb(); + }); + }); + }); + }); +} + + +function teardown() { + if (cluster.isWorker) + process._channel.unref(); +} diff --git a/node_modules/strong-store-cluster/test/lock-get-set-del.js b/node_modules/strong-store-cluster/test/lock-get-set-del.js new file mode 100644 index 0000000..e826340 --- /dev/null +++ b/node_modules/strong-store-cluster/test/lock-get-set-del.js @@ -0,0 +1,12 @@ + +var helper = require('./helper/cluster-helper'); + +suite('lock-get-set-del', function() { + test('master', function(cb) { + helper.run('do-lock-get-set-del', true, 0, 1, cb); + }); + + test('worker', function(cb) { + helper.run('do-lock-get-set-del', false, 1, 1, cb); + }); +}); diff --git a/package.json b/package.json index 0f4e12d..ed9c1e8 100644 --- a/package.json +++ b/package.json @@ -20,8 +20,10 @@ "moment": "", "async": "", "passport-oauth": "", + "markdown": "", "node-uuid": "~1.4.1", - "MD5": "~1.2.1" + "MD5": "~1.2.1", + "pushover-notifications": "~0.2.2" }, "devDependencies": { "supervisor": "" diff --git a/public/css/biomed/navbar.less b/public/css/biomed/navbar.less index 5e66f0e..8ef88aa 100644 --- a/public/css/biomed/navbar.less +++ b/public/css/biomed/navbar.less @@ -46,6 +46,10 @@ min-height: 40px; } + .nav { + width: 100%; + } + .nav > li { border-right: 1px solid #41bedd; } @@ -76,3 +80,13 @@ background-color: @navbarSecondaryLinkBackgroundActive; } } + + +.day-of-year { + float: right !important; + position: relative; + color: white; + font-size: 18px; + top: 10px; + right: 10px; +} diff --git a/public/css/biomed/widgets.less b/public/css/biomed/widgets.less index 4ffbdbf..2f548ff 100644 --- a/public/css/biomed/widgets.less +++ b/public/css/biomed/widgets.less @@ -288,6 +288,10 @@ header { width: 140px; } + .tech-current { + font-weight: bold; + } + .enteries { margin-left: 150px; position: relative; @@ -719,3 +723,33 @@ header { background-position: -7px -400px; } } + + + + +th.sort-true::after { + width: 0; + height: 0; + border-left: 8px solid transparent; + border-right: 8px solid transparent; + + border-top: 8px solid #000; + content: ""; + top: 12px; + left: 10px; + position: relative; +} + +th.sort-false::after { + width: 0; + height: 0; + border-left: 8px solid transparent; + border-right: 8px solid transparent; + + border-bottom: 8px solid #000; + content: ""; + bottom: 11px; + left: 10px; + position: relative; +} + diff --git a/public/js/app.js 
b/public/js/app.js index 2691c84..09ebca5 100644 --- a/public/js/app.js +++ b/public/js/app.js @@ -9,6 +9,7 @@ angular.module('biomed', ['biomed.filters', 'biomed.services', 'biomed.directive sales: 'Sales', other: 'Others' }; + $rootScope.dayOfYear = moment().dayOfYear(); }) .config(function($routeProvider, $locationProvider, $httpProvider) { diff --git a/public/js/controllers.js b/public/js/controllers.js index 7708e1f..37b50e3 100644 --- a/public/js/controllers.js +++ b/public/js/controllers.js @@ -18,9 +18,10 @@ biomed.TechScheduleCtrl = function($scope, $routeParams, $location, Schedule, Us function updateDate() { Schedule.index({ tech: $routeParams.id, - start: $scope.date.toJSON(), - end: moment($scope.date).add('days', 7).toDate().toJSON() + start: moment($scope.date).subtract('days', 10).toDate().toJSON(), + end: moment($scope.date).add('days', 21).toDate().toJSON() }, function(result) { + console.log(result); $scope.schedule = result; }); } @@ -85,6 +86,25 @@ biomed.SchedulePmsCtrl = function($scope, Clients) { }); } + $scope.sort = { + column: 'client.name', + descending: false + }; + + $scope.selectedCls = function(column) { + return column == $scope.sort.column && 'sort-' + $scope.sort.descending; + } + + $scope.changeSorting = function(column) { + var sort = $scope.sort; + if (sort.column == column) { + sort.descending = !sort.descending; + } else { + sort.column = column; + sort.descending = false; + } + }; + $scope.$watch('month', filter); }; @@ -181,17 +201,60 @@ biomed.UserClockCtrl = function($scope, $routeParams, Users) { }; biomed.PostIndexCtrl = function($scope, $routeParams, Posts, LocationBinder) { - $scope.loading = true; + var updatePosts = function() { + $scope.loading = true; - $scope.posts = Posts.index(function() { - $scope.loading = false; - }); + $scope.posts = Posts.index( + {page: $scope.page}, + function() { + $scope.loading = false; + + $scope.posted = 0; + + angular.forEach($scope.posts, function(value) { + if (value.status === "posted") { + $scope.posted += 1; + } + }); + }); + }; + + $scope.selectPage = function(page) { + $scope.page = page; + }; + + $scope.$watch('page', updatePosts); }; biomed.PostAddCtrl = function($scope, Posts, $location) { + $scope.tagOptions = { + 'multiple': true, + 'simple_tags': true, + 'tags': [], + 'formatNoMatches': function() { return 'Type a tag and press return to add it.'; } + }; + + $scope.pages = [ + { value: 'front', label: 'Front Page' }, + { value: 'about-us', label: 'About Us' }, + { value: 'sales', label: 'Sales' }, + { value: 'service', label: 'Service' } + ]; + + $scope.togglePage = function(page) { + var idx = $scope.model.pages.indexOf(page.value); + if (idx > -1) { + $scope.model.pages.splice(idx, 1); + } else { + $scope.model.pages.push(page.value); + } + } + $scope.model = { - gallery: [] + gallery: [], + pages: [], + postedOn: new Date() }; $scope.titleImageOptions = { @@ -255,10 +318,6 @@ biomed.PostAddCtrl = function($scope, Posts, $location) { $scope.model.status = status; $scope.model.createdOn = new Date(); - if (status === 'posted') { - $scope.model.postedOn = new Date(); - } - Posts.create($scope.model, function(result) { $location.path("/posts/" + result._id); }); @@ -276,9 +335,34 @@ biomed.PostAddCtrl = function($scope, Posts, $location) { biomed.PostEditCtrl = function($scope, Posts, $routeParams, $location) { var galleryImages = {}; + $scope.tagOptions = { + 'multiple': true, + 'simple_tags': true, + 'tags': [], + 'formatNoMatches': function() { return 'Type a tag and press return to add 
it.'; } + }; + + $scope.pages = [ + { value: 'front', label: 'Front Page' }, + { value: 'about-us', label: 'About Us' }, + { value: 'sales', label: 'Sales' }, + { value: 'service', label: 'Service' } + ]; + + $scope.togglePage = function(page) { + var idx = $scope.model.pages.indexOf(page.value); + if (idx > -1) { + $scope.model.pages.splice(idx, 1); + } else { + $scope.model.pages.push(page.value); + } + } + $scope.model = Posts.get($routeParams, function() { $scope.loading = false; + console.log($scope.model); + if ($scope.model.image) { $scope.existingTitleImages = [$scope.model.image]; } @@ -287,6 +371,10 @@ biomed.PostEditCtrl = function($scope, Posts, $routeParams, $location) { for (var i = 0; i < $scope.model.gallery.length; i++) { galleryImages[$scope.model.gallery[i]] = 1; } + + if (!$scope.model.postedOn) { + $scope.model.postedOn = new Date(); + } }); $scope.titleImageOptions = { @@ -349,11 +437,7 @@ biomed.PostEditCtrl = function($scope, Posts, $routeParams, $location) { $scope.model.gallery = Object.keys(galleryImages); $scope.model.status = status; - if (status === 'posted') { - $scope.model.postedOn = new Date(); - } else { - $scope.model.postedOn = null; - } + console.log($scope.model); Posts.update({id: $scope.model._id}, $scope.model, function(result) { $location.path("/posts/"); @@ -396,7 +480,7 @@ biomed.ClientIndexCtrl = function($scope, $filter, $routeParams, Clients, Locati LocationBinder($scope, ['query']); $scope.filter = function() { - filteredData = $filter('filter')(allData, $scope.query); + filteredData = $filter('orderBy')($filter('filter')(allData, $scope.query), $scope.sort.column, $scope.sort.descending); index = initialPageSize; $scope.canLoad = true; $scope.clients = filteredData.slice(0, initialPageSize); @@ -407,6 +491,27 @@ biomed.ClientIndexCtrl = function($scope, $filter, $routeParams, Clients, Locati index += pageSize; $scope.canLoad = index < filteredData.length; } + + $scope.sort = { + column: 'name', + descending: false + }; + + $scope.selectedCls = function(column) { + return column == $scope.sort.column && 'sort-' + $scope.sort.descending; + } + + $scope.changeSorting = function(column) { + var sort = $scope.sort; + if (sort.column == column) { + sort.descending = !sort.descending; + } else { + sort.column = column; + sort.descending = false; + } + + $scope.filter(); + }; }; biomed.ClientAddCtrl = function($scope, Clients, $location) { @@ -578,6 +683,10 @@ biomed.WorkorderIndexCtrl = function($scope, $filter, $routeParams, Workorders, $scope.$watch('end', fetchData); + $scope.sort = { + column: 'scheduling.start', + descending: true + }; $scope.addItems = function() { $scope.workorders = $scope.workorders.concat(filteredData.slice(index, index + pageSize)); @@ -592,6 +701,20 @@ biomed.WorkorderIndexCtrl = function($scope, $filter, $routeParams, Workorders, $scope.workorders = filteredData.slice(0, initialPageSize); }; + $scope.selectedCls = function(column) { + return column == $scope.sort.column && 'sort-' + $scope.sort.descending; + } + + $scope.changeSorting = function(column) { + var sort = $scope.sort; + if (sort.column == column) { + sort.descending = !sort.descending; + } else { + sort.column = column; + sort.descending = false; + } + }; + function fetchData() { $scope.loading = true; @@ -657,6 +780,10 @@ biomed.WorkorderAddCtrl = function($scope, $location, Workorders, Schedule, Clie $scope.$watch('group', updateUsers); + $scope.$watch('model.client', function() { + $scope.currentClient = Clients.get({ id: $scope.model.client }); + }); 
+ Clients.index(function(result) { $scope.clients = result; }); diff --git a/public/js/directives.js b/public/js/directives.js index 7630ed6..880ffe1 100644 --- a/public/js/directives.js +++ b/public/js/directives.js @@ -89,10 +89,17 @@ angular.module('biomed.directives', []) attr.$observe('value', update)(); attr.$observe('title', function(){ update(); a.text(tab.title); })(); + attr.$observe('visible', function(){ + update(); + tab.tabElement[0].style.display = (tab.visible === "false") ? 'none' : 'block'; + })(); function update() { + console.log(attr.visible); tab.title = attr.title; tab.value = attr.value || attr.title; + tab.visible = attr.visible; + if (!ngModel.$setViewValue && (!ngModel.$viewValue || tab == selectedTab)) { // we are not part of angular ngModel.$viewValue = tab.value; @@ -235,17 +242,17 @@ angular.module('biomed.directives', []) function setupScale() { x = d3.scale.linear() .range([0, 100]) - .domain([420, 1320]) + .domain([420, 1140]) .clamp(true); } setupScale(); var color = d3.scale.category20(); - var hourWidth = 100 / 15; + var hourWidth = 100 / 12; $scope.hourMarkers = []; - for (var i = 7; i < 22; i++) { + for (var i = 7; i < 19; i++) { $scope.hourMarkers.push({ date: moment({ hour: i }).toDate(), style: { @@ -266,13 +273,16 @@ angular.module('biomed.directives', []) function generateDate() { var range = moment($scope.date); var data = {}; + var current = range.format('ddd MMM Do YYYY'); - for (var i = 0; i < 7; i++) { + for (var i = -7; i < 22; i++) { var day = range.clone().add(i, 'days'); var key = day.format('MM-DD-YYYY'); var label = day.format('ddd MMM Do YYYY'); data[key] = { + order: i, + current: current == label, label: label, values: [] }; @@ -345,7 +355,12 @@ angular.module('biomed.directives', []) }) }); - $scope.data = data; + var dataArray = []; + for (var o in data) { + dataArray.push(data[o]); + } + + $scope.data = dataArray; } } }; @@ -374,7 +389,7 @@ angular.module('biomed.directives', []) rangeDate = moment($scope.date).startOf('day'); rangeStart = moment(rangeDate).add('hours', 7); - rangeEnd = moment(rangeDate).add('hours', 22); + rangeEnd = moment(rangeDate).add('hours', 19); x = d3.time.scale() .range([0, 100]) diff --git a/public/partials/clients/edit.html b/public/partials/clients/edit.html index 7f32cae..322fd27 100644 --- a/public/partials/clients/edit.html +++ b/public/partials/clients/edit.html @@ -257,7 +257,7 @@ -
    +
    diff --git a/public/partials/clients/index.html b/public/partials/clients/index.html index 6318f2a..2651aca 100644 --- a/public/partials/clients/index.html +++ b/public/partials/clients/index.html @@ -20,10 +20,10 @@
    - - - - + + + + diff --git a/public/partials/posts/add.html b/public/partials/posts/add.html index c61f036..f64c7cb 100644 --- a/public/partials/posts/add.html +++ b/public/partials/posts/add.html @@ -29,6 +29,32 @@ +
    + +
    + +
    +
    +
    + +
    + +
    +
    +
    + +
    + +
    +
    @@ -42,7 +68,7 @@
    -
    +
    diff --git a/public/partials/posts/edit.html b/public/partials/posts/edit.html index a97d329..f832560 100644 --- a/public/partials/posts/edit.html +++ b/public/partials/posts/edit.html @@ -29,6 +29,32 @@
    +
    + +
    + +
    +
    +
    + +
    + +
    +
    +
    + +
    + +
    +
    @@ -42,7 +68,7 @@
    -
    +
    diff --git a/public/partials/posts/index.html b/public/partials/posts/index.html index f19087a..bedf87f 100644 --- a/public/partials/posts/index.html +++ b/public/partials/posts/index.html @@ -9,20 +9,28 @@
    Create new Post + All + Front Page + About Us + Sales + Service +
    + Total Published: {{posted}} +
    Client NameContactPhoneIDClient NameContactPhone
    - - - - - + + + + + - - + + - + @@ -37,6 +37,7 @@ + @@ -57,6 +58,7 @@ +
    TitleAuthorCreated onPosted onStatusTitleAuthorCreated onPosted onStatus
    There is no information to display.
    There is no information to display.
    diff --git a/public/partials/schedule/pms.html b/public/partials/schedule/pms.html index 84a8a5f..19d5465 100644 --- a/public/partials/schedule/pms.html +++ b/public/partials/schedule/pms.html @@ -33,20 +33,20 @@ - - - - + + + + - + - +
    Client NameReasonContactPhoneClient NameReasonContactPhone
    There is no information to display.
    {{pm.client.name}} ({{pm.client.identifier | uppercase}})
    {{pm.reason}} {{pm.client.contacts[0].name}}{{pm.lient.contacts[0].phone}}{{pm.client.contacts[0].phone}}
    diff --git a/public/partials/techSchedule.html b/public/partials/techSchedule.html index a91e9e3..307a216 100644 --- a/public/partials/techSchedule.html +++ b/public/partials/techSchedule.html @@ -12,7 +12,7 @@
    -
    {{values.label}}
    +
    {{values.label}}
    {{entry.workorder.client.identifier}}
    diff --git a/public/partials/users/index.html b/public/partials/users/index.html index d15ae9c..def8734 100644 --- a/public/partials/users/index.html +++ b/public/partials/users/index.html @@ -21,7 +21,7 @@
    GroupsPermissionsPermissions
    NameEdit Site AdminFrequency
    diff --git a/public/partials/workorders/add.html b/public/partials/workorders/add.html index e8ad0cc..9b2647c 100644 --- a/public/partials/workorders/add.html +++ b/public/partials/workorders/add.html @@ -21,6 +21,26 @@
    +
    + +
    + {{currentClient.contacts[0].name}} +
    + {{currentClient.address.street1}}
    + {{currentClient.address.street2}}
    + {{currentClient.address.city}}, {{currentClient.address.state}}. {{currentClient.address.zip}} +
    + {{currentClient.contacts[0].phone}} +
    +
    +
    + +
    {{currentClient.notes['internal']}}
    +
    +
    + +
    {{currentClient.notes['tech']}}
    +
    diff --git a/public/partials/workorders/index.html b/public/partials/workorders/index.html index 52174f3..bbced8d 100644 --- a/public/partials/workorders/index.html +++ b/public/partials/workorders/index.html @@ -27,17 +27,17 @@
    - - - - - - + + + + + + - +
    WorkorderClientDateStatus
    WorkorderClientDateStatus
    There is no information to display.
    #{{workorder.biomedId}} - {{workorder.reason}}
    Techs: {{ workorder.techs | techs }}
diff --git a/server.js b/server.js
index e6b82a5..4f06a9d 100644
--- a/server.js
+++ b/server.js
@@ -1,16 +1,38 @@
+var pushover = require('pushover-notifications');
+
 var express = require('express')
     fs = require('fs'),
     passport = require('passport');
 
-var env = process.env.NODE_ENV || 'development',
+var env = 'prod',
     config = require('./config/config')[env],
     mongoose = require('mongoose');
 
 var log = require('log4node');
 
-log.reconfigure({
-  level: 'info',
-  file: 'server.log'
-});
+
+
+process.on('uncaughtException', function(err) {
+  console.log('Uncaught Exception:', err);
+  console.log(err.stack);
+
+  var p = new pushover({
+    user: 'aJmPD4KigO0vLwim76n3WqWKwbKA3k',
+    token: 'YxspDLz3WinbPmwBThuZXCME9QmkDb'
+  });
+
+  var message = {
+    title: 'Unhandled error in portal',
+    message: 'Process was reset on ' + new Date(),
+    sound: 'falling'
+  };
+  p.send(message, function(err, result) {
+    if (err) {
+      log.emergency('Error while sending pushover notification');
+      log.emergency(err);
+    }
+    process.exit(1);
+  });
+});
 
 log.info("----- Server Started -----");
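For reference, a minimal standalone sketch of the crash-notification pattern the `server.js` hunk above introduces. The `PUSHOVER_USER` and `PUSHOVER_TOKEN` environment variables are assumptions made here so the credentials are not embedded in source; the hunk itself hardcodes them.

```javascript
// Sketch only: notify via Pushover on an uncaught exception, then exit so a
// process manager can restart the app.
var Pushover = require('pushover-notifications');

process.on('uncaughtException', function(err) {
  console.error('Uncaught exception:', err.stack || err);

  var p = new Pushover({
    user: process.env.PUSHOVER_USER,   // assumed environment variable
    token: process.env.PUSHOVER_TOKEN  // assumed environment variable
  });

  p.send({
    title: 'Unhandled error in portal',
    message: 'Process was reset on ' + new Date(),
    sound: 'falling'
  }, function(sendErr) {
    if (sendErr) {
      console.error('Error while sending pushover notification', sendErr);
    }
    process.exit(1);
  });
});
```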