I have this JSON:
var myJSON = '{"kind": "person", "fullName": "Rivka3"}';
I'm trying to upload it to BigQuery using createReadStream. When I save it to a local file first, it works:
var fs = require('fs');

// Write the JSON to disk, then stream the file into BigQuery once the write has finished
fs.writeFile("/tmp/bq_json_file_new.json", myJSON, function(err) {
  if (err) throw err;

  fs.createReadStream("/tmp/bq_json_file_new.json")
    .pipe(table.createWriteStream(metadata))
    .on('complete', function(job) {
      job
        .on('error', console.log)
        .on('complete', function(metadata) {
          console.log('job completed', metadata);
        });
    });
});
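(For context, the table and metadata objects used in all of these snippets are assumed to come from a BigQuery client set up roughly like this; the project, key file, dataset and table names below are placeholders:)

var gcloud = require('gcloud')({
  projectId: 'my-project',              // placeholder
  keyFilename: '/path/to/keyfile.json'  // placeholder
});
var bigquery = gcloud.bigquery();
var table = bigquery.dataset('my_dataset').table('my_table');  // placeholder names
var metadata = { sourceFormat: 'NEWLINE_DELIMITED_JSON' };     // load job config for JSON input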
Now I'm trying to do the same thing without saving the file locally, using a Buffer:
fs.createReadStream(new Buffer(myJSON, "utf8"))
  .pipe(table.createWriteStream(metadata))
  .on('complete', function(job) {
    job
      .on('error', console.log)
      .on('complete', function(metadata) {
        console.log('job completed', metadata);
      });
  });
But I receive this error, because fs.createReadStream expects a file path rather than a Buffer:
fs.js:575
binding.open(pathModule._makeLong(path),
TypeError: path must be a string
Wrapping the Buffer in a stream.PassThrough solves the problem:
var stream = require('stream');

// Wrap the JSON string in a PassThrough stream and pipe it straight into BigQuery,
// no temporary file needed
var bufferStream = new stream.PassThrough();
bufferStream.end(new Buffer(myJSON));

bufferStream.pipe(table.createWriteStream(metadata))
  .on('complete', function(job) {
    job
      .on('error', console.log)
      .on('complete', function(metadata) {
        console.log('job completed', metadata);
      });
  });
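On newer Node versions (12.3+ / 10.17+), stream.Readable.from gives an even shorter way to turn the string into a readable stream; a sketch of the same upload, assuming the same table and metadata as above:

var stream = require('stream');

// Readable.from turns the string into a readable stream without a PassThrough
stream.Readable.from([myJSON])
  .pipe(table.createWriteStream(metadata))
  .on('complete', function(job) {
    job
      .on('error', console.log)
      .on('complete', function(metadata) {
        console.log('job completed', metadata);
      });
  });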