Serializing data with Avro in node js

Pistolo picture Pistolo · Apr 8, 2014 · Viewed 10.4k times · Source

I would like to serialize data from a JSON object and send it through the network with Kafka as the destination. I currently have an Avro schema in a file that determines the fields necessary to send to Kafka for the logging system:

{"namespace": "com.company.wr.messages",
   "type": "record",
   "name": "Log",
   "fields": [
       {"name": "timestamp", "type": "long"},
       {"name": "source", "type": "string"},
       {"name": "version", "type": "string"},
       {"name": "ipAddress", "type": "string"},
       {"name": "name", "type": "string"},
       {"name": "level", "type": "string"},
       {"name": "errorCode", "type": "string"},
       {"name": "message", "type": "string"}
       ]
}

I am using the node package 'avro-schema'; I tried others but none of them are working well. I just need to serialize to the Avro format from Node.js.

Answer

mtth picture mtth · Sep 29, 2015

With avsc:

const avro = require('avsc');

// Build an Avro type from the schema definition. The resulting object
// knows how to encode and decode records matching this schema.
const logType = avro.parse({
  "namespace": "com.company.wr.messages",
  "type": "record",
  "name": "Log",
  "fields": [
    {"name": "timestamp", "type": "long"},
    {"name": "source", "type": "string"},
    {"name": "version", "type": "string"},
    {"name": "ipAddress", "type": "string"},
    {"name": "name", "type": "string"},
    {"name": "level", "type": "string"},
    {"name": "errorCode", "type": "string"},
    {"name": "message", "type": "string"}
  ]
});

// An example record conforming to the Log schema above.
const record = {
  timestamp: 2313213,
  source: 'src',
  version: '1.0',
  ipAddress: '0.0.0.0',
  name: 'foo',
  level: 'INFO',
  errorCode: '',
  message: ''
};

// Serialize the record to its binary Avro representation (a Node Buffer),
// ready to be sent to Kafka.
const encoded = logType.toBuffer(record);

You can find more information on the various encoding methods in the avsc API documentation (the `Type` class, e.g. `toBuffer`, `fromBuffer`, and the streaming encoders).