MEAN Stack – Avoid callback hell with RxJS

What is RxJS?

RxJS is a JavaScript library for reactive programming that uses Observables to simplify composing asynchronous code. Roughly put, the library lets you subscribe to a stream of values (an Observable) that changes over time and run methods in response to those changes.
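
For example, a minimal sketch (using the same rxjs import style as the code later in this post) of subscribing to a stream of values:

import * as rx from "rxjs";

// Emit three values and react to each one as it arrives
rx.Observable.of(1, 2, 3).subscribe(
  value => console.log("next:", value),     // runs for every value in the stream
  error => console.error("error:", error),  // runs if the stream fails
  () => console.log("complete")             // runs once the stream ends
);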

What is callback hell?

Simply put, callback hell is the result of nesting too many success and error callback methods in asynchronous JavaScript code. To learn more, please read http://callbackhell.com/

What about Promises?

Promises alleviate the pain of using callbacks by letting you chain function calls to asynchronous methods. However, when one method depends on the execution of multiple asynchronous methods, nesting method calls is still hard to avoid.
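
For instance, here is a hypothetical sketch (the lookup functions are stand-in stubs, not taken from the code below) showing how dependent calls still drift toward nesting even with Promises:

// Stand-in stubs for real asynchronous lookups
const findOffice = (id: string) => Promise.resolve({ id });
const findCountry = (id: string) => Promise.resolve({ id });
const saveUser = (user: object) => Promise.resolve(user);

// Each step depends on the previous ones, so the calls nest again
findOffice("o1").then(office => {
  findCountry("c1").then(country => {
    saveUser({ office, country }).then(() => console.log("saved"));
  });
});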

Enough with the 101, let’s see a use case:

We have an event registration form that allows users to register for an online event. The registration form contains the following fields: FirstName, LastName, EmailAddress, Country and Office.

Requirement #1 – Users cannot register twice. User email address must be unique.

import * as express from "express";
import * as HttpStatus from "http-status-codes";
import * as mongodb from "mongodb";

function registerUser(req: express.Request, res: express.Response, next: express.NextFunction) {
  let user = req.body;
  let onError = (error) => { handleError(error, res) };
  findUser(user, res, insertUser, onError);
}

function findUser(user: any, res: express.Response, onFindSuccess: Function, onFindError: Function) {
  mongodb.MongoClient.connect("mongodb://localhost:27017/events").then(db => {
    db.collection("users").findOne({ emailAddress: user.emailAddress }).then(user => {
      if (user) {
        onFindSuccess(user, res, onFindError);
      }
      else {
        onFindError("Email already exists")
      }
    }).catch(error => onFindError(error));
  }).catch(error => onFindError(error));
}
function insertUser(user: any, res: express.Response, onInsertError: Function) {
  mongodb.MongoClient.connect("mongodb://localhost:27017/events").then(db => {
    db.collection("users")
      .insert(user)
      .then(user => {
        res.status(HttpStatus.CREATED).send("Registration Created");
      })
      .catch(error => onInsertError(error, res));
  }).catch(error => onInsertError(error, res));
}

function handleError(error: any, res: any) {
  res.status(HttpStatus.INTERNAL_SERVER_ERROR).send("Server Error");
}

Requirement #2 – Okay, that was not so bad. Now, how about also validating that countryId and officeId are valid?

import * as express from "express";
import * as HttpStatus from "http-status-codes";
import * as mongodb from "mongodb";
function registerUser(req: express.Request, res: express.Response, next: express.NextFunction) {
  let user = req.body;
  let onError = (error) => { handleError(error, res) };
  findOffice(user, res, onError);
}
function findOffice(user: any, res: express.Response, onFindError: Function) {
  mongodb.MongoClient.connect("mongodb://localhost:27017/events").then(db => {
    db.collection("offices").findOne({ _id: new mongodb.ObjectID(user.officeId) })
    .then(office => {
      if (office) {
        findCountry(user, res, onFindError);
      }
      else {
        onFindError("Bad Request");
      }
    }).catch(error => onFindError(error));
  }).catch(error => onFindError(error));
}
function findCountry(user: any, res: express.Response, onFindError: Function) {
  mongodb.MongoClient.connect("mongodb://localhost:27017/events").then(db => {
      db.collection("countries").findOne({ _id: new mongodb.ObjectID(user.countryId) })
        .then(country => {
          if (country) {
            findUser(user, res, onFindError);
          }
          else {
            onFindError("Bad Request");
          }
        }).catch(error => onFindError(error));
  }).catch(error => onFindError(error));
}
function findUser(user: any, res: express.Response, onFindError: Function) {
  mongodb.MongoClient.connect("mongodb://localhost:27017/events").then(db => {
    db.collection("users")
      .findOne({ emailAddress: user.emailAddress })
      .then(existingUser => {
        if (existingUser) {
          onFindError("Email already exists");
        }
        else {
          insertUser(user, res, onFindError);
        }
      }).catch(error => onFindError(error));
  }).catch(error => onFindError(error));
}
function insertUser(user: any, res: express.Response, onInsertError: Function) {
  mongodb.MongoClient.connect("mongodb://localhost:27017/events").then(db => {
    db.collection("users")
      .insert(user)
      .then(user => {
        res.status(HttpStatus.CREATED).send("Registration Created");
      })
      .catch(error => onInsertError(error, res));
  }).catch(error => onInsertError(error, res));
}
function handleError(error: any, res: any) {
  res.status(HttpStatus.INTERNAL_SERVER_ERROR).send("Server Error");
}

You can see that things are starting to get a little out of hand. What if I need to add one more validation? This type of code becomes increasingly entangled with each modification.

Let’s try this again with RxJS:

Requirement #1 – Users cannot register twice. User email address must be unique.

import * as express from "express";
import * as HttpStatus from "http-status-codes";
import * as mongodb from "mongodb";
import * as rx from "rxjs";

function registerUser(req: express.Request, res: express.Response, next: express.NextFunction) {
  let user = req.body;
  findUser(user).subscribe(existingUser => {
    if (existingUser) {
      res.status(HttpStatus.BAD_REQUEST).send("Email already exists");
      return;
    }
    insertUser(user).subscribe(success => {
      res.status(HttpStatus.CREATED).send("Registration Created");
    }, error => {
      res.status(HttpStatus.INTERNAL_SERVER_ERROR).send("Server Error");
    });
  }, error => {
    res.status(HttpStatus.INTERNAL_SERVER_ERROR).send("Server Error");
  });
}

function insertUser(user: any) {
  return rx.Observable.create((observer: rx.Observer<any>) => {
    mongodb.MongoClient.connect("mongodb://localhost:27017/events")
      .then((db: mongodb.Db) => {
        db.collection("users").insert(user)
          .then(result => {
            observer.next(result);
            observer.complete();
          }).catch((error) => onError(observer, error));
      }).catch((error) => onError(observer, error));
  });
}

function findUser(user: any): rx.Observable<any> {
  return rx.Observable.create((observer: rx.Observer<any>) => {
    mongodb.MongoClient.connect("mongodb://localhost:27017/events")
      .then((db: mongodb.Db) => {
        db.collection("users").findOne({ emailAddress: user.emailAddress })
          .then(result => {
            observer.next(result);
            observer.complete();
          }).catch((error) => onError(observer, error));
      }).catch((error) => onError(observer, error));
  });
}

function onError(observer: rx.Observer<any>, error: any) {
  observer.error(error);
  observer.complete();
}

function handleError(error: any, res: any) {
  res.status(HttpStatus.INTERNAL_SERVER_ERROR).send("Server Error");
}

Requirement #2 – Now, how about also validating that countryId and officeId are valid? This is where RxJS really shines.

import * as express from "express";
import * as HttpStatus from "http-status-codes";
import * as mongodb from "mongodb";
import * as rx from "rxjs";

function registerUser(req: express.Request, res: express.Response, next: express.NextFunction) {
  let user = req.body;
  let countryValidations = validateCountry(user.countryId);
  let officeValidations = validateOffice(user.officeId);
  // The email is only valid when no existing user is found
  let userValidations = findUser(user).map(existingUser => !existingUser);

  // Validate all my rules (share() lets both subscriptions below reuse a single execution)
  let validations = rx.Observable.forkJoin(countryValidations, officeValidations, userValidations).share();

  // If all validations pass
  validations.filter(results => results.every(valid => valid))
    .subscribe(
      results => {
        insertUser(user).subscribe(
          result => res.status(HttpStatus.CREATED).send("Registration Created"),
          error => handleError(error, res)
        );
      },
      error => handleError(error, res)
  );

  // If any validations fail
  validations.filter(results => results.some(valid => !valid))
  .subscribe(results => {
    res.status(HttpStatus.BAD_REQUEST).send("Bad Request");
  });
}

function validateOffice(officeId: any): rx.Observable<boolean> {
  return rx.Observable.create((observer: rx.Observer<any>) => {
    mongodb.MongoClient.connect("mongodb://localhost:27017/events")
      .then((db: mongodb.Db) => {
        db.collection("offices").findOne({ _id: new mongodb.ObjectID(officeId) })
          .then(result => {
            observer.next(!!result); // emit a boolean: does the office exist?
            observer.complete();
          }).catch((error) => onError(observer, error));
      }).catch((error) => onError(observer, error));
  });
}

function validateCountry(countryId: any): rx.Observable<boolean> {
  return rx.Observable.create((observer: rx.Observer<any>) => {
    mongodb.MongoClient.connect("mongodb://localhost:27017/events")
      .then((db: mongodb.Db) => {
        db.collection("countries").findOne({ _id: new mongodb.ObjectID(countryId) })
          .then(result => {
            observer.next(!!result); // emit a boolean: does the country exist?
            observer.complete();
          }).catch((error) => onError(observer, error));
      }).catch((error) => onError(observer, error));
  });
}

function insertUser(user: any) {
  return rx.Observable.create((observer: rx.Observer<any>) => {
    mongodb.MongoClient.connect("mongodb://localhost:27017/events")
      .then((db: mongodb.Db) => {
        db.collection("users").insert(user)
          .then(result => {
            observer.next(result);
            observer.complete();
          }).catch((error) => onError(observer, error));
      }).catch((error) => onError(observer, error));
  });
}

function findUser(user: any): rx.Observable<any> {
  return rx.Observable.create((observer: rx.Observer<any>) => {
    mongodb.MongoClient.connect("mongodb://localhost:27017/events")
      .then((db: mongodb.Db) => {
        db.collection("users").findOne({ emailAddress: user.emailAddress })
          .then(result => {
            observer.next(result);
            observer.complete();
          }).catch((error) => onError(observer, error));
     }).catch((error) => onError(observer, error));
  });
}

function onError(observer: rx.Observer<any>, error: any) {
  observer.error(error);
  observer.complete();
}

function handleError(error: any, res: any) {
  res.status(HttpStatus.INTERNAL_SERVER_ERROR).send("Server Error");
}

By utilizing RxJS, we were able to decouple our validation methods so that each validation can happen independently and concurrently. We also made it easy to add or remove validations in the future. Additionally, we can filter the combined validation results so that the user is registered only when all validations pass, and return an error response when any validation fails.

References:

RxJS – https://github.com/Reactive-Extensions/RxJS
JavaScript Callback Hell – http://callbackhell.com/
JavaScript Promises – https://developers.google.com/web/fundamentals/getting-started/primers/promises


Rapid Prototype – Restaurant Management Mobile App

In the modern age, adopting technology allows us to innovate in even the oldest industries, and the restaurant industry is one of them. While it's possible to run a successful restaurant business without any technology, there is always room to increase revenue, decrease expenses and optimize operational costs. These areas range from efficient staffing; inventory management; adoption of point of sale devices; customer engagement; floor and kitchen efficiency; etc.

As a challenge to myself, I began rapidly prototyping a mobile app with a time limit of two days. Although speed of development was the main constraint, the application should not sacrifice usability and feasibility. More importantly, the prototype should provide value to the industry by: 1. Improving the speed of receiving an order from the menu and communicating the details of the order to the kitchen. 2. Integrating with the existing billing process (POS) and providing insights into the performance of the restaurant menu.

orders
Waiters can take orders quickly with onscreen keyboard and product codes
kitchen
Kitchen is immediately notified when an order is placed
admin
Restaurant managers can see metrics on their top products and sales by week

The source code of the application is available on Github.
A live demo is deployed to Heroku.

 

Nom3 – Look Up and Discover Recipes by Selecting Ingredients

Quite often I find myself on a website featuring tasty recipes. However, being lazy, I simply refuse to do an ingredient run to make sure I have everything ready and prepared to make lunch or dinner. If I'm hungry, it's time to cook with whatever is available in the house. The website Nom3 (short for NomNomNom) is dedicated to lazy people like me who would rather pick recipes based on the ingredients already available in the kitchen than spend time shopping before cooking.

Go forth and search for whatever recipes you can cook with the ingredients you already have in your kitchen. You might even discover something new!

nom3

Disclaimer… I cannot cook, and my wife does all of the cooking.  Thanks Wifey! ♥

References:
Github Source Code – https://github.com/saichaitanya88/nomnomnom
Live Site – http://nom3.saichaitanya.ca/#/

An Approach to Multi-Tenant Applications with MongoDB

NoSQL databases like MongoDB scale easily and support faster application development thanks to their flexible schema. However, because of that flexible schema, they are not the first-choice database for many teams. I believe MongoDB can be used creatively to build multi-tenant applications by keeping client metadata in one database while storing client-specific data in separate databases. Let us explore one such approach below, where the application allows users to define their own schema and perform CRUD operations on it.

In the application architecture described in Figure 1, the application is split so that one main database stores the metadata for the application users, while each client's data is stored in its own separate database. This allows us to scale a particular client's database on demand. For most small companies that provide software as a service, I would speculate that it's common to end up with one or two large clients that demand architectural changes that benefit them while affecting the other clients negatively (in poorly designed systems, anyway). The primary advantage of storing each client's data separately is that, in addition to scaling individual client databases, we can also scale the application by spinning up new server instances, but that's outside the scope of this article.

app-architecture
Figure 1: Multi-Tenant Architecture with Multiple Databases

Storing Client Account Information:

Let’s set up the main database (System DB) to store the account information. The account information contains one user login for the account along with the data schema.

{
    "_id" : ObjectId("557126a6edd785072443982c"),
    "email" : "saichaitanya88@gmail.com",
    "password" : "<password>",
    "lastName" : "lastname",
    "firstName" : "firstname",
    "podName" : "v2gncto",
    "createdAt" : ISODate("2015-06-05T04:33:42.191Z"),
    "updatedAt" : ISODate("2015-06-06T01:44:03.761Z"),
    "otherInfo" : {}
}

The podName field contains the name of the database that holds the client's data; it can be extended to store a full database connection string.
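
As a rough sketch of how this could be used (the getClientDb helper, the "accounts" collection name and the connection string below are illustrative assumptions, not taken from the actual source), resolving the client database for a request might look like this:

import * as mongodb from "mongodb";

// Look up the account in the System DB, then open the client database named by podName
function getClientDb(email: string): Promise<mongodb.Db> {
  return mongodb.MongoClient.connect("mongodb://localhost:27017/system")
    .then(systemDb =>
      systemDb.collection("accounts")
        .findOne({ email: email })
        .then(account => systemDb.db(account.podName)) // reuse the same connection for the client DB
    );
}

// Usage: getClientDb("saichaitanya88@gmail.com").then(db => db.collection("person").find().toArray());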

Storing Client Database Schema:

Client specific schema is stored into the CustomObjects collection:

{
    "_id" : ObjectId("558a021a0232718a087ed237"),
    "name" : "Person",
    "customObjectName" : "person",
    "description" : "Person customObject",
    "accountId" : ObjectId("557126a6edd785072443982c"),
    "createdAt" : ISODate("2015-06-24T01:04:26.393Z"),
    "updatedAt" : ISODate("2015-06-24T02:03:50.203Z"),
    "createdBy" : ObjectId("557126a6edd785072443982c"),
    "updatedBy" : ObjectId("557126a6edd785072443982c"),
    "modelDefinition" : [
        {
            "_id" : ObjectId("558a021a0232718a087ed238"),
            "name" : "Created At",
            "fieldName" : "createdAt",
            "description" : "Created At",
            "type" : "Date",
            "scope" : "System",
            "createdAt" : ISODate("2015-06-24T01:04:26.393Z"),
            "updatedAt" : ISODate("2015-06-24T01:04:26.393Z"),
            "createdBy" : ObjectId("557126a6edd785072443982c"),
            "updatedBy" : ObjectId("557126a6edd785072443982c")
        },
        {
            "_id" : ObjectId("558a021a0232718a087ed239"),
            "name" : "Updated At",
            "fieldName" : "updatedAt",
            "description" : "Updated At",
            "type" : "Date",
            "scope" : "System",
            "createdAt" : ISODate("2015-06-24T01:04:26.393Z"),
            "updatedAt" : ISODate("2015-06-24T01:04:26.393Z"),
            "createdBy" : ObjectId("557126a6edd785072443982c"),
            "updatedBy" : ObjectId("557126a6edd785072443982c")
        },
        {
            "_id" : ObjectId("558a021a0232718a087ed23a"),
            "name" : "Created By",
            "fieldName" : "createdBy",
            "description" : "Created By",
            "type" : "ObjectId",
            "scope" : "System",
            "createdAt" : ISODate("2015-06-24T01:04:26.393Z"),
            "updatedAt" : ISODate("2015-06-24T01:04:26.393Z"),
            "createdBy" : ObjectId("557126a6edd785072443982c"),
            "updatedBy" : ObjectId("557126a6edd785072443982c")
        },
        {
            "_id" : ObjectId("558a021a0232718a087ed23b"),
            "name" : "Updated By",
            "fieldName" : "updatedBy",
            "description" : "Updated By",
            "type" : "ObjectId",
            "scope" : "System",
            "createdAt" : ISODate("2015-06-24T01:04:26.393Z"),
            "updatedAt" : ISODate("2015-06-24T01:04:26.393Z"),
            "createdBy" : ObjectId("557126a6edd785072443982c"),
            "updatedBy" : ObjectId("557126a6edd785072443982c")
        },
        {
            "_id" : ObjectId("558a1006e42e219619f5b495"),
            "name" : "DOB",
            "fieldName" : "dOB",
            "description" : "Date Of Birth",
            "type" : "Date",
            "scope" : "Application",
            "createdAt" : ISODate("2015-06-24T02:03:50.203Z"),
            "updatedAt" : ISODate("2015-06-24T02:03:50.203Z"),
            "createdBy" : ObjectId("557126a6edd785072443982c"),
            "updatedBy" : ObjectId("557126a6edd785072443982c")
        }
    ]
}

Now, with the schema in the CustomObjects collection describing each custom object, its fields and data types, we can build a persistence layer to make sure that the data inserted into the client database complies with that schema. (Persistence layer source code)
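
A simplified sketch of what such a check could look like (the conformToSchema helper below is illustrative only; the real implementation lives in the linked persistence layer source):

import * as mongodb from "mongodb";

// Keep only the fields declared in the CustomObject's modelDefinition and
// coerce each value to its declared type before it is written to the client DB
function conformToSchema(customObject: any, record: any): any {
  let result: any = {};
  customObject.modelDefinition.forEach((field: any) => {
    let value = record[field.fieldName];
    if (value === undefined) { return; } // field not supplied by the caller
    switch (field.type) {
      case "Date":
        result[field.fieldName] = new Date(value);
        break;
      case "ObjectId":
        result[field.fieldName] = new mongodb.ObjectID(value);
        break;
      default:
        result[field.fieldName] = value;
    }
  });
  return result;
}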

Client Custom Data:

With these in place, we can build CRUD actions for any CustomObject collection the user defines. The details are in the source code. Here is an example Person record according to the schema defined above:

{
    "_id" : ObjectId("558cada3a1899f635d7d3190"),
    "dOB" : ISODate("2015-06-25T03:15:32.229Z"),
    "createdAt" : ISODate("2015-06-26T01:40:51.394Z"),
    "updatedAt" : ISODate("2015-06-28T17:03:14.143Z"),
    "createdBy" : ObjectId("557126a6edd785072443982c"),
    "updatedBy" : ObjectId("557126a6edd785072443982c")
}

Additional Benefits:

While users can define their own data schema for CustomObjects, we can also provide a single user interface that lets them query and modify the objects. The UI looks at the CustomObject schema and renders the text boxes (for search and edit) based on the data type of each field; a rough sketch of this mapping follows the figures below.

Search UI
Figure 2: Auto-Generated Search UI
Save UI
Figure 3: Auto-Generated Edit UI
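
As a rough sketch of the idea (the inputTypeFor helper and the "Number" type name are illustrative assumptions), the renderer only has to map each field's declared type to an input type:

// Choose an HTML input type for a field based on its declared schema type
function inputTypeFor(field: { type: string }): string {
  switch (field.type) {
    case "Date": return "date";      // render a date picker
    case "Number": return "number";  // assumed numeric type name
    default: return "text";          // ObjectId and plain text fields fall back to a text box
  }
}

// e.g. person.modelDefinition.map(f => `<input type="${inputTypeFor(f)}" name="${f.fieldName}">`)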

Limitations and Considerations:

One limitation in my preliminary work is that MongoDB did not support collection joins until version 3.2, which means that schemas where multiple collections are linked and referenced may not perform well.

It's also possible that other databases such as RethinkDB, CouchDB or Cassandra are a better fit for this type of application structure; the database that best fits the needs of the application should be chosen.

Future Work:

With client-side JavaScript becoming ever more popular, we could enhance the database schema to allow storing JavaScript functions that run in the browser. Taking this one step further, we could even store alternative methods to accommodate similar functionality for the modules used by different application clients. For example, the calculation of income taxes on wages differs from country to country; if each country is a client, two separate JavaScript functions can be defined in the schema so the application seamlessly uses the appropriate income tax calculation. Another example: two clients (a car sales company and a software company) both want to measure employee performance; the car sales company measures performance by the number of sales made, while the software company measures it by the ratio of features to bugs per employee.
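
As a very rough sketch of the idea (the clientFormulas map and the tax rates below are made-up illustrations; evaluating stored code would also need proper sandboxing in practice):

// Per-client formulas stored as text alongside the schema
let clientFormulas: { [clientId: string]: string } = {
  "clientA": "wages => wages * 0.25",  // illustrative rate only
  "clientB": "wages => wages * 0.30"   // illustrative rate only
};

function incomeTaxFor(clientId: string, wages: number): number {
  let calculate = eval(clientFormulas[clientId]); // sandbox this in a real system
  return calculate(wages);
}

console.log(incomeTaxFor("clientA", 50000)); // 12500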

The schema can also be extended to contain custom Model validation rules allowing validation rules which depend on other collections to be implemented.

References:

MongoDB – https://www.mongodb.org/
NodeJS – https://nodejs.org/en/
Github Source Code – https://github.com/saichaitanya88/contact_manager.prototype

Real-time Push Notifications in a Distributed Environment with NodeJS, RethinkDB and Socket.IO

Building push notifications with Web Sockets (Socket.IO or SignalR) has been discussed and implemented quite successfully. As I read and learned about RethinkDB, I wondered why we need a database that pushes change-feeds to the clients connected to it. Since most basic applications have a single back-end server, one might not derive a lot of benefit from using RethinkDB for push notifications to the server, because the server already knows when a change happens in the database as it performs commands and queries. However, in a distributed environment, say multiple NodeJS servers interacting with one database, I speculate that it is highly beneficial to use a database like RethinkDB that automatically updates all servers with the changes to a resource, allowing every server to forward the notifications to its connected clients.

One such implementation is described below:

For the sake of simplicity, our application will allow us to manage contacts with four fields: FirstName, LastName, Email, and Age

The application being discussed allows multiple users to collaborate on individual resources while providing updates to all users who are using the application. The key components of this application are NodeJS (servers), Socket.IO (web sockets), AngularJS (UI) and RethinkDB (see Figure 1).

architecture
Figure 1: Application Architecture

In order to build our application, we can follow the steps below or refer to the source code on my GitHub.

STEP 1: Build CRUD API for Contacts using RethinkDB and NodeJS

The code below implements CRUD actions and exposes the following actions:

[GET] /api/contacts – Get all contacts
[POST] /api/contact – Create new contact
[GET] /api/contact/:id – Retrieve a contact
[PUT] /api/contact/:id – Update a contact
[DELETE] /api/contact/:id – Delete a contact

var r = require('rethinkdb');

function ContactsAPI () {
  "use strict";
  var connectionInfo = { host: 'localhost', port: 28015 };
  function executeQuery(fn){
    r.connect(connectionInfo, fn);
  };
  this.GetContacts = function GetContacts(req,res){    
    function getContacts(err, conn) {
      if(err) { res.status(500).send(err); return; }
      r.table('contacts').run(conn, function(err, cursor)
      {
        if(err) { res.status(500).send(err); return; }
        cursor.toArray(function (err, results){
          if(err) { res.status(500).send(err); return; }
          res.status(200).send(results);
        })
      });
    };
    executeQuery(getContacts);
  };
  this.GetContact = function GetContact(req,res){    
    function getContact(err, conn) {
      if(err) { res.status(500).send(err); return; }
      r.table('contacts').get(req.params.id).run(conn, function(err, results)
      {
        if(err) { res.status(500).send(err); return; }
          res.status(200).send(results);
      });
    };
    executeQuery(getContact);
  };
  this.UpdateContact = function UpdateContact(req, res){
    function updateContact(err, conn) {
      if(err) { res.status(500).send(err); return; }
      r.table('contacts').filter(r.row("id").eq(req.params.id)).update(req.body).run(conn, function(err, results) {
          if(err) { res.status(500).send(err); return; }
          res.status(200).send(results);
      });
    };
    executeQuery(updateContact);
  };
  this.CreateContact = function CreateContact(req, res){
    function createContact(err, conn) {
      if(err) { res.status(500).send(err); return; }
      r.table('contacts').insert(req.body).run(conn, function(err, results) {
          if(err) { res.status(500).send(err); return; }
          res.status(201).send(results);
      });
    };
    executeQuery(createContact);
  };
  this.DeleteContact = function DeleteContact(req, res){
    function deleteContact(err, conn) {
      if(err) { res.status(500).send(err); return; }
      r.table('contacts').filter(r.row("id").eq(req.params.id)).delete().run(conn, function(err, results) {
          if(err) { res.status(500).send(err); return; }
          res.status(200).send(results);
      });
    };
    executeQuery(deleteContact);
  }
};

module.exports.ContactsAPI = ContactsAPI;
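
The module above still has to be mounted on an Express app; a sketch of that wiring is shown below (the file path, body-parser usage and port are assumptions here; the actual app.js is in the repository):

// Sketch: expose the ContactsAPI actions as the routes listed above
var express = require('express');
var bodyParser = require('body-parser');
var ContactsAPI = require('./contactsApi').ContactsAPI;

var app = express();
app.use(bodyParser.json());

var api = new ContactsAPI();
app.get('/api/contacts', api.GetContacts);
app.post('/api/contact', api.CreateContact);
app.get('/api/contact/:id', api.GetContact);
app.put('/api/contact/:id', api.UpdateContact);
app.delete('/api/contact/:id', api.DeleteContact);

app.listen(process.env.PORT || 3000);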

STEP 2: Subscribe to ChangeFeeds in RethinkDB with NodeJS

var r = require('rethinkdb');

function Subscription (io) {
  "use strict";
  var connectionInfo = { host: 'localhost', port: 28015 };
  function executeQuery(fn){
    r.connect(connectionInfo, fn);
  };
  this.SubscribeContactsTable = function SubscribeContactsTable(){    
    function subscribeContactsTable(err, conn) {
      if(err) { throw err; }
      r.table('contacts').changes().run(conn, function(err, cursor){
        if(err) { throw err; }
        cursor.each(function(err, row){
          if (err) throw err;
          notifySubscribers(row);
        })
      })
    };
    executeQuery(subscribeContactsTable);
  };
  function notifySubscribers(row){ // [1]
    console.log(JSON.stringify(row, null, 2));
    var defaultRoom = '#/'; // [2]
    var room = '';
    var evt = '';
    if (!row.new_val){ // [3]
      evt = 'deleted';
      room = '#/contact/' + row.old_val.id;
    }
    else if (!row.old_val){
      evt = 'created';
      room = '#/contact/' + row.new_val.id; // a created row has no old_val, so use new_val
    }
    else{
      evt = 'updated';
      room = '#/contact/' + row.old_val.id;
    }
    io.to(defaultRoom).emit(evt, { data : row }); // [4]
    io.to(room).emit(evt, { data : row }); // [5]
  };
};

module.exports.Subscription = Subscription;

[1] – For each change in the batch (always one change in our application), this function will notify the subscribers.

[2] – Socket.IO room names can be created and maintained based on the table name and the resource id. Incidentally, these also map to the URLs available in the application. The default room ‘#/’ maps to the main page of our application, which lets the UI inform users that a change has happened.

[3] – When a change happens, RethinkDB sends the server both the new value and the old value of the record. At this time, there’s no built-in way (that I am aware of) to tell the application the type of event (update, create, delete), so it’s up to the application to determine this.

[4] – Notify everyone that’s currently on the homepage that a change has happened in the database.

[5] – Notify everyone that’s currently browsing the resource that a change has happened.
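
For these room notifications to reach anyone, each client also has to join the room for the page it is viewing when it connects. Below is a sketch of that server-side handler (the 'join' event name and payload are assumptions for illustration; the actual wiring is in the repository):

var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);

io.on('connection', function (socket) {
  socket.on('join', function (room) {
    socket.join(room);                      // e.g. '#/' or '#/contact/<id>'
    socket.emit('joined', { room: room });  // picked up by the client's 'joined' handler
  });
});

http.listen(process.env.PORT || 3000);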

STEP 3: Build UI for the application using Angular

The application consists of two controllers – ContactCtrl and StreamCtrl. ContactCtrl manages all the UI actions for the application and StreamCtrl manages the messages from SocketIO.

    var ctrls = angular.module('controllers', []);
    ctrls.controller('ContactCtrl', ['$scope', '$http', '$location', '$routeParams', 'ContactService',
      function($scope, $http, $location, $routeParams, ContactService) {
          //... removed for brevity
      }
    ]);
    ctrls.controller('StreamCtrl', ['$scope', '$http', 'SocketService', function($scope, $http, SocketService) {
      //... removed for brevity
    }]);

STEP 4: Integrate the UI with NodeJS so that SocketIO can push updates to the UI

The services factory is responsible for starting the SocketService and managing the messages sent by the server.

One thing to note with this approach is that when the socket service triggers the appropriate methods at markers [1] and [2], we have to call the .$apply() method in order for the change to be applied to the $scope of the Angular controller.

services.factory("SocketService", function(){
    var socket = io();
  socket.on('joined', function(msg){
    updateRoom(msg);
  });
  socket.on('updated', function(msg){
      updateContact(msg, 'updated');
  });
  socket.on('created', function(msg){
      updateContact(msg, 'created');
  });
  socket.on('deleted', function(msg){
      updateContact(msg, 'deleted');
  });
  function updateContact(msg, event){
    updateRoom(msg);
    if (window.location.hash == '#/'){
      // [1]
      angular.element('[ng-controller="ContactCtrl"]').scope().Methods.GetContacts();
    }
    // [2]
    angular.element('[ng-controller="ContactCtrl"]').scope().Methods.OnServerEvent(msg, event);
    angular.element('[ng-controller="ContactCtrl"]').scope().$apply();
  }
  function updateRoom(msg){
    // Is there a better way?
    angular.element('[ng-controller="StreamCtrl"]').scope().updates.push(msg);
    angular.element('[ng-controller="StreamCtrl"]').scope().$apply();
  }
  return { socket: socket }
});

STEP 5: Handle updates and display updates to the user

The SocketService triggers the OnServerEvent method which is responsible for providing the notification to the user.

 

ctrls.controller('ContactCtrl', ['$scope', '$http', '$location', '$routeParams', 'ContactService',
  function($scope, $http, $location, $routeParams, ContactService) {
      // ... removed for brevity
    $scope.Methods.OnServerEvent = function(msg, event){ // [1]
      if (event == 'deleted') $scope.Methods.OnContactDeleted();
      if (event == 'updated' )//&& sessionStorage.awaitEvent != ('updated:' + $scope.contact.id))
      {
        $scope.Methods.OnContactUpdated(msg.data);
      }
    }
    $scope.Methods.OnContactDeleted = function(msg){ // [2]
      if (window.location.hash == '#/') { $scope.Methods.GetContacts(); return; }
      alert("The contact you are editing has been deleted. This contact is no longer available.");
      window.location.href = '/#/';
    }
    $scope.Methods.OnContactUpdated = function(data){ // [3]
      if (window.location.hash == '#/') { $scope.Methods.GetContacts(); return; }
      console.log(data);
      // Compare updated contact model
      var syncRequired = false;
      if (data.new_val.id == $scope.contact.id){
        if ((data.new_val.firstName != $scope.contact.firstName) || (data.new_val.lastName != $scope.contact.lastName) || (data.new_val.email != $scope.contact.email) || (data.new_val.age != $scope.contact.age)){
          syncRequired = true;
        }
      }
      if (syncRequired){
        var update = confirm("The current contact has been updated. Would you like to update your page?");
        if (update){
          // two options - look through form's unchanged fields and update the unchanged fields only using angular's built-in methods
          // simpler way - update the whole model at once.
          $scope.contact = data.new_val;
        }
      }
    };
  }
]);

[1] – The OnServerEvent method routes to the OnContactDeleted or OnContactUpdated based on the event type.

[2] – The OnContactDeleted method lets users on the home page know a resource was deleted and triggers the list of contacts to be reloaded. If a user is currently viewing the deleted resource, they are notified that the resource is no longer available and sent back to the homepage.

[3] – The OnContactUpdated method lets users on the home page know a resource was updated and triggers the list of contacts to be reloaded. If the user is currently viewing the updated resource, they are asked whether they want to load the latest version and continue working.

Development Tips:

During local development, it’s possible to spin up new instances of the NodeJS server listening on different ports, e.g. PORT=1234 supervisor app.js (or PORT=1234 node app.js).

Screenshots:

Homepage
Figure 2: Homepage
update notification
Figure 3: Update Notification
deleted notification
Figure 4: Delete Notification

 

Suggestions on existing implementations:

  • One might ask how to make use of RethinkDB without having to build a new application. It’s possible to create a service that uses RethinkDB primarily as an event broadcaster, providing updates to all dependent services.
  • RethinkDB benefits applications not only when the servers are linearly scaled instances of the same piece of code (e.g. a web server or API service) but also when a variety of services depend on the changes happening in the database.

References:
RethinkDB – https://www.rethinkdb.com/
NodeJS – https://nodejs.org/en/
SocketIO – http://socket.io/
Github Source Code – https://github.com/saichaitanya88/pushnotification.prototype

Microservices and the Future of Software Development

I found a recent podcast on Software Engineering Radio [1] insightful on the state and future of the software career. With more organizations opting to implement SOA, microservice architectures and the like on lightweight frameworks, it appears that their decisions are also influenced by software developers being open to switching jobs roughly every two years (on average, as I speculate). With increasing employee turnover, it makes sense to build your application using an architecture that is less susceptible to loss of knowledge. The idea that a microservice can be built, tested, and deployed within a month means that a subset of software development jobs will transition from 5-6 year careers into 6-12 month gigs. Websites like Fiverr, Elance, Freelancer, and Upwork play an increasing role in facilitating this transition.

[1] http://www.se-radio.net/2015/12/se-radio-episode-245-john-sonmez-on-marketing-yourself-and-managing-your-career/