我想对来自JSON对象的数据进行序列化,并通过网络将其发送到Kafka(Kafka是最终目的地).目前我在一个文件中有一个Avro模式,它定义了日志记录系统发送到Kafka所需的字段:
{"namespace": "com.company.wr.messages", "type": "record", "name": "Log", "fields": [ {"name": "timestamp", "type": "long"}, {"name": "source", "type": "string"}, {"name": "version", "type": "string"}, {"name": "ipAddress", "type": "string"}, {"name": "name", "type": "string"}, {"name": "level", "type": "string"}, {"name": "errorCode", "type": "string"}, {"name": "message", "type": "string"} ] }
我正在使用Node包'avro-schema',我也尝试过其他的包,但当时没有一个工作得很好.我只需要在Node.js中以Avro格式进行序列化.
使用 avsc 包:
var avro = require('avsc'); // Parse the schema. var logType = avro.parse({ "namespace": "com.company.wr.messages", "type": "record", "name": "Log", "fields": [ {"name": "timestamp", "type": "long"}, {"name": "source", "type": "string"}, {"name": "version", "type": "string"}, {"name": "ipAddress", "type": "string"}, {"name": "name", "type": "string"}, {"name": "level", "type": "string"}, {"name": "errorCode", "type": "string"}, {"name": "message", "type": "string"} ] }); // A sample log record. var obj = { timestamp: 2313213, source: 'src', version: '1.0', ipAddress: '0.0.0.0', name: 'foo', level: 'INFO', errorCode: '', message: '' }; // And its corresponding Avro encoding. var buf = logType.toBuffer(obj);
您可以在此处找到有关各种编码方法的更多信息.