By Ryan Ramage / @ryan_ramage_
thing1 | thing2 | thing3 |
---|---|---|
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
1 | 2 | 3 |
Get the stuff to the user as fast as possible
Because Speed is King in applications
Garden hose
Dealing with disk/network/io/processor variability
Keep memory spikes down
The internet was built for streams.
TCP uses an end-to-end flow control protocol to avoid having the sender send data too fast for the TCP receiver to receive and process it reliably.
-Wikipedia on TCP Flow control
Wrapping xhr with a progressive streaming interface
A JSON parser that sits somewhere between SAX and DOM.
Dealing with failure mid request/response
Incremental loading, and browser caching
Easy JSON slice and dice
GET /myapp/things
{
"foods": [
{"name":"aubergine", "colour":"purple"},
{"name":"apple", "colour":"red"},
{"name":"nuts", "colour":"brown"}
],
"badThings": [
{"name":"poison", "colour":"pink"},
{"name":"broken_glass", "colour":"green"}
]
}
// Show a spinner while the streamed response loads.
MyApp.showSpinner('#foods');

oboe('/myapp/things')
   .node('foods.*', function( foodThing ){
      // Fires once per element of the "foods" array, as soon as each arrives.
      $.templateThing.append( foodThing.name + ' is ' + foodThing.colour );
   })
   .node('badThings.*', function( badThing ){
      // Fix: the callback parameter is `badThing` (singular); the original
      // referenced an undefined `badThings` here.
      console.log( 'Danger! stay away from ' + badThing.name );
   })
   .done( function(things){
      // `things` is the fully assembled JSON once the whole response has arrived.
      MyApp.hideSpinner('#foods');
      // Fix: the response property is `badThings` (see the sample JSON), not `nonFoods`.
      console.log( 'there are ' + things.foods.length + ' things you can eat ' +
                   'and ' + things.badThings.length + ' that you shouldn\'t.' );
   });
// Duck-typed pattern matching: fires for every object, anywhere in the
// response, that has both a `name` and a `colour` property.
oboe('/myapp/things')
   .node('{name colour}', function( foodObject ) {
      // I'll get called for every object found that
      // has both a name and a colour
   });
// Fix: the chain must be closed with `);` — the original `};` was a syntax error.
Streaming between node and the browser
"Streams in node are one of the rare occasions when doing something the fast way is actually easier. SO USE THEM. not since bash has streaming been introduced into a high level language as nicely as it is in node."
var http = require('http');
var fs = require('fs');

// Anti-pattern (the slide's "bad" example): readFile buffers the ENTIRE
// movie into memory before a single byte is sent to the client.
var server = http.createServer(function (req, res) {
  fs.readFile(__dirname + '/movie.mp4', function (err, data) {
    // Fix: the original ignored `err`, so a read failure would silently
    // serve `res.end(undefined)` — an empty 200 response.
    if (err) {
      res.statusCode = 500;
      res.end('Internal Server Error');
      return;
    }
    res.end(data);
  });
});
server.listen(8000);
var http = require('http');
var fs = require('fs');

// Streaming version: pipe the file straight into the response, so bytes
// flow to the client as they are read from disk, with bounded memory use.
var server = http.createServer(function (req, res) {
  fs.createReadStream(__dirname + '/movie.mp4').pipe(res);
});
server.listen(8000);
var fs = require("fs");

// Copy input/people.json to output/people.json by piping a
// read stream into a write stream.
var source = fs.createReadStream("input/people.json");
var destination = fs.createWriteStream("output/people.json");
source.pipe(destination);
var fs = require("fs");
var zlib = require("zlib");

// Decompress input/people.csv.gz to output/people.csv:
// read -> gunzip -> write, fully streaming.
var gzipped = fs.createReadStream("input/people.csv.gz");
var unzipper = zlib.createGunzip();
var output = fs.createWriteStream("output/people.csv");
gzipped.pipe(unzipper).pipe(output);
Advice: Watch for > 1 year inactivity.
Streams have changed a lot
Look for trusted names in the stream eco-system
You can stream any response to a file stream.
request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'))
The EventStream functions resemble the array functions, because Streams are like Arrays, but laid out in time, rather than in memory.
Example of event stream
var inspect = require('util').inspect;
var es = require('event-stream'); // load event-stream

// Pretty-print JSON arriving on stdin, one line at a time.
// es.pipe joins the streams: stdin -> split on newlines -> map -> stdout.
es.pipe(
  process.openStdin(),               // open stdin
  es.split(),                        // break the stream on newlines
  es.map(function (line, callback) { // turn this async function into a stream
    var parsed;
    try {
      parsed = JSON.parse(line);     // try to parse the input as JSON
    } catch (err) {
      callback(null, line);          // if it fails, just pass it along anyway
      return;
    }
    callback(null, inspect(parsed)); // render it nicely
  }),
  process.stdout                     // pipe it to stdout!
)
// curl -sS registry.npmjs.org/event-stream | node pretty.js
//
> npm install -g stream-adventure
> stream-adventure
request('http://localhost:5984/db/_design/app/_view/things_by_date').pipe(resp);
// Emit (as quoted strings) every db name carrying the 'dumb_user-' prefix,
// with the prefix stripped off by strip_prefix.
var filter = function(data, emit) {
  data.forEach(function(name) {
    if (name.indexOf('dumb_user-') !== 0) return; // skip non-matching names
    emit('"' + strip_prefix(name) + '"');
  });
}
// Pipe the _all_dbs listing through the filter transform, then out to the response.
request('http://localhost:5984/_all_dbs')
  .pipe(new FilterThrough(filter))
  .pipe(resp);
00:00: > curl -X GET "http://localhost:5984/db/_changes?feed=continuous&since=3&filter=app/important"
{"seq":4,"id":"test4","changes":[{"rev":"1-02c6b758b08360abefc383d74ed5973d"}]}
{"seq":5,"id":"test5","changes":[{"rev":"1-02c6b758b08360abefc383d74ed5973d"}]}
var levelup = require('levelup');
// Two local LevelDB stores; everything in srcdb gets replicated into dstdb.
var srcdb = levelup('./srcdb');
var dstdb = levelup('./destdb');
srcdb.put('name', 'LevelUP');
// Replicate by piping a full read stream into a write stream.
// NOTE(review): `onDone` is not defined in this snippet — presumably a
// completion callback supplied elsewhere; confirm before running.
srcdb.createReadStream().pipe(dstdb.createWriteStream()).on('close', onDone)
// Stream a key range straight to an HTTP response.
// NOTE(review): start 'n' sorts AFTER end 'k' — lexicographically this range
// is empty unless reverse iteration was intended; verify the bounds.
srcdb.createReadStream({
start: 'n',
end: 'k'
}).pipe(resp)
// Glob matching every JS source file (shared by the other tasks).
var scriptFiles = './src/**/*.js';

gulp.task('compile', function(){
  // concat all scripts, minify, and output
  gulp.src(scriptFiles)
    .pipe(concat({fileName: pkg.name + ".js"})) // Fix: closing paren for .pipe( was missing
    .pipe(minify())
    .pipe(gulp.dest('./dist/'));
});
gulp.task('test', function(){
  // lint our scripts
  var sources = gulp.src(scriptFiles);
  sources.pipe(jshint());
  // run our tests in a child process, sharing this terminal's stdio
  spawn('npm', ['test'], {stdio: 'inherit'});
});
gulp.task('default', function(){
  var runAll = function(){
    gulp.run('test', 'compile');
  };
  runAll();                        // run once at startup
  gulp.watch(scriptFiles, runAll); // and again on every source change
});
// please provide
// NOTE(review): pseudocode wish-list, not valid JavaScript — the method
// stubs have no bodies. It sketches the CRUD-style API the speaker is
// asking framework authors to expose as streams.
class People extends MVCrap {
getAll()
getByX()
}
stream-to-pull-stream: Convert a classic-stream, or a new-stream into a pull-stream
invert-stream: Create a pair of streams (A, B) such that A.write(X) -> B.emit('data', X) and B.write(X) -> A.emit('data', X)
The Node concurrency model is kind of like a credit card for computing work. Credit cards free you from the hassle and risks of carrying cash, (...) unless you spend more than you make. Try making 200,000 HTTP client requests in a tight loop, starting each request without waiting for the previous response.
Once a process can't keep up with its work and starts to get slow, this slowness often cascades into other processes in the system. Streams are not the solution here. In fact, streams are kind of what gets us into this mess in the first place by providing a convenient fire and forget API for an individual socket or HTTP request without regard to the health of the rest of the system
Mix and match:
Try them out today!
By Ryan Ramage / @ryan_ramage_