Skip to Content

Issues while executing Node js ReadStream and WriteStream codes?

Hi All,

I have written a code that does the following

1) Read Result Set from SQL query

2) Store the RS output in a file

3) Read the file and set into a JSON

4) Insert the JSON into a table

Code is below

/*---------------------------------- JSON Input------------------------------------------ */
var conn = $.db.getConnection();
var query1 = "select * from \"Toll::User.json\"";
var oStatement = conn.prepareStatement(query1);
oStatement.execute();
var oResultSet = oStatement.getResultSet();
console.log(oResultSet);
console.log("------------------------After oResultSet");
var data;
var dataList = [];
while (oResultSet.next()) {
	data = {};
	data.fname = oResultSet.getString(1);
	data.lname = oResultSet.getString(2);
	data.ag = oResultSet.getInteger(3);
	dataList.push(data);
}

console.log(dataList);
console.log("---------------------------After dataList");
/*---------------------------------- Storing Data in a JSON File - File gets stored in HANA SHARED STORAGE------------------------------------------ */

var fs = $.require("fs");
var stream;
var data1 = JSON.stringify(dataList);
stream = fs.createWriteStream("nod.json");

stream.write(data1);
console.log(data1);
console.log("--------------------------------After Writestream");
/*---------------------------------- Reading stored JSON data - Readin FILE from HANA SHARED STORAGE------------------------------------------ */
console.log("--------------------------------Before REadstream");


var chunk = {};
stream = fs.createReadStream("nod.json");
stream.on("data", function myfunc(datum) {
    chunk = JSON.parse(datum);
    console.log(chunk);
}); 


console.log("-------------------------------After ReadStream");

/*---------------------------------- Wiritng stored JSON data into Database Table------------------------------------------ */

for (var i in chunk) {
	
	var query = conn.prepareStatement("INSERT INTO \"Tooll::User.insert\" VALUES ('" + chunk[i].fname + "','" + chunk[i].lname +
		"'," + chunk[i].ag + ")");
	query.execute();
	conn.commit();
	query.close();
	conn.close();
	console.log("-------------------------------Success");

}
oStatement.close();
oResultSet.close();

$.response.contentType = "application/json";
$.response.contentType = "text/plain";
$.response.setBody(chunk);

The problem here is "Step 4" is getting executed ahead of "Step 3", so I'm not able to insert data into the table since "chunk" executes slowly. Please help to solve this issue

Add comment
10|10000 characters needed characters exceeded

  • Get RSS Feed

1 Answer

  • Best Answer
    Dec 30, 2018 at 03:40 PM

    That is because you are assuming that the async stream operation is finished before you execute the sync XSJS coding which should insert the data into a table.

    Personally I would avoid mixing up async and sync coding.

    But in your case, the save step could be done when the "end" event is raised for the stream.

    Add comment
    10|10000 characters needed characters exceeded