Uploading .csv and xlsx files on HCP Trial – Part 2
In the previous blog we described how to set up an ODBC Connection with the HCP Trial.
In the second part we will see how to use Node.js
First we have to download and install node.js from here and follow the instructions as described here.
When everything has been set up, you will need to open a cmd window inside the folder where you will run the following node.js script.
(Note: the paths/variables/file names are changed for privacy reasons)
In the previous blog we described how to set up an ODBC Connection with the HCP Trial.
In the second part we will see how to use Node.js
First we have to download and install node.js from here and follow the instructions as described here.
When everything has been set up, you will need to open a cmd window inside the folder where you will run the following node.js script.
But what does this code do?
It looks inside a specific folder and searches for xlsx files, in order to upload all of them into HCP.
It is also worth mentioning that the script iterates over every sheet inside each of the xlsx files it finds.
(Note: the paths/variables/file names are changed for privacy reasons)
// Dependencies (all previously installed via npm).
var XLSX = require('xlsx');
var fs = require('fs');
var hdb = require('hdb');
var csv = require('fast-csv'); // loaded by the original script; not used below
var path = require('path');

// .xlsx file names discovered in the data folder; filled in by upload_data().
var array_of_files = [];

// Sanity check: open a known workbook and report how many sheets it has.
var workbook = XLSX.readFile('C:/Users/cuser/Desktop/data/new/m3.xlsx');
var sheet_name_list = workbook.SheetNames;
console.log('sheets are: ', sheet_name_list.length);

// HANA (HCP trial) connection. Credentials are placeholders — see the note
// above about paths/variables being changed for privacy reasons.
var hdbclient = hdb.createClient({
  host: 'localhost',
  port: 30115,
  user: 'DEV_',
  password: 'xxxxx'
});

hdbclient.on('error', function (error) {
  return console.error(error); // was `console.errro` — typo fixed
});

// Row counter shared with upload_data().
var counter = 0;

hdbclient.connect(function (error) {
  if (error) {
    return console.error(error);
  }
  // Empty the target table first, and only start uploading once the DELETE
  // has completed. (The original issued the DELETE at top level, racing both
  // the connection handshake and the INSERTs.)
  var sql = 'DELETE FROM "NEO_ "."EX"';
  hdbclient.exec(sql, function (delError, affectedRows) {
    if (delError) {
      return console.log(delError);
    }
    //console.log('Rows deleted: ' + affectedRows);
    upload_data();
  });
});
/**
 * Scans the data folder for .xlsx files and uploads every data row of every
 * sheet into the "NEO_ "."EX" table through the global `hdbclient`.
 *
 * Row 1 of each sheet is treated as the header row; columns F and G are read
 * via the formatted text (`.w`, e.g. dates) while all other cells use the raw
 * value (`.v`). The client is closed only after the last INSERT has called
 * back (the original closed it inside the sheet loop, before the async
 * INSERTs had finished).
 */
function upload_data() {
  var man_path = 'C:/Users/cuser/Desktop/data/new/';

  // Purely informational: log the resolved absolute path of the data folder.
  fs.realpath(man_path, function (err, resolved) {
    if (err) {
      console.log(err);
      return;
    }
    console.log('Path is : ' + resolved);
  });

  fs.readdir(man_path, function (err, files) {
    if (err) return;

    // Keep only the .xlsx files.
    files.forEach(function (f) {
      if (path.extname(f).toLowerCase() === '.xlsx') {
        console.log('xlsx files found ! ! ! ');
        array_of_files.push(f);
      }
    });
    console.log('array of files has: ', array_of_files);

    var pending = 0; // in-flight INSERTs; close the client when this hits 0

    // NOTE: the original `for` loop had its opening brace commented out
    // (`// {`), so only its first statement was actually repeated — fixed
    // here by iterating properly over every discovered file.
    array_of_files.forEach(function (file) {
      var workbookname = man_path + file;
      var workbook2 = XLSX.readFile(workbookname);
      console.log('workbook name is: ', workbookname);
      var sheet_names = workbook2.SheetNames;
      console.log('sheet_name_list is: ', sheet_names);

      sheet_names.forEach(function (y) {
        var worksheet = workbook2.Sheets[y];
        var headers = {};
        var data = [];

        for (var z in worksheet) {
          if (z[0] === '!') continue; // skip sheet metadata keys like !ref

          // Parse the cell address into column letters and row number.
          // (The original took only the first character, which breaks for
          // columns beyond Z, e.g. "AA1".)
          var match = /^([A-Z]+)(\d+)$/.exec(z);
          if (!match) continue;
          var col = match[1];
          var row = parseInt(match[2], 10);

          // Columns F and G: use the formatted text (dates); others: raw value.
          var value = (col === 'F' || col === 'G') ? worksheet[z].w : worksheet[z].v;

          // Row 1 holds the header names.
          if (row === 1) {
            headers[col] = value;
            continue;
          }
          if (!data[row]) data[row] = {};
          data[row][headers[col]] = value;

          // Debug trace of every cell value (best effort; errors reported).
          fs.appendFile('mess.txt', value + '\r\n', function (appendErr) {
            if (appendErr) console.error(appendErr);
          });
        }

        // `data` is indexed by row number, so slots 0 and 1 are always empty.
        data.shift();
        data.shift();

        var for_counter = 0;
        for (var t in data) {
          console.log('counter_for is: ', for_counter);
          for_counter++;

          // Build the VALUES list with join() instead of the fragile
          // fixed-width counter the original used to drop the last comma.
          var fields = [];
          for (var h in data[t]) {
            fields.push("'" + data[t][h] + "'");
          }
          // NOTE(review): values are interpolated straight into the SQL; the
          // hdb client supports prepared statements — prefer parameterized
          // INSERTs if the spreadsheet content is not fully trusted.
          var sql = 'INSERT INTO "NEO_ "."EX" VALUES (' + fields.join(',') + ')';

          pending++;
          hdbclient.exec(sql, function (error, affectedRows) {
            if (error) {
              console.error(error);
            } else {
              console.log('Rows inserted: ' + affectedRows);
            }
            // Close only after the final INSERT has called back.
            if (--pending === 0) {
              hdbclient.close();
              console.log('Upload finished');
            }
          });
          console.log(data.length);
        }
      }); // for each sheet name
    }); // for every file in folder
  }); // fs read dir
}
To execute the script, run the following command from inside that folder:
node the_name_of_script.js
When the uploading is finished you should be able to see all data inside your HCP table.
Please comment if you have any questions,
Best,
Christoforos
Would you mind sharing the content of your sample file and HANA table's structure to make it easier for us? 🙂
Well the content has nothing to do with the solution !
This is why we design algorithms 😛
Btw I got rid of the counter and used this [may not be the most elegant still, but does the job of removing the comma after the last field]:
for (var h in data[t]) {
sql = sql + "'" + data[t][h] + "',";
}
sql = sql.substr(0, sql.length - 1) + ")";
Hi Christoforos Verras,
Thanks for the code and procedure. Perhaps you could add some screenshots and the HCP database fields — that would make it even more helpful.
Regards,
Nagesh
Hi Nagesh,
Just modify the code on your case, depending on the columns your .csv file has.
Best,
Christoforos
Hi All,
I have created my trial SAP Cloud account and trying to upload a csv file. I was able to create a tunnel but when I am running a Node JS and getting below error:
buffer.js:577
return buf.latin1Slice(start, end);
^
Error: "toString()" failed
at stringSlice (buffer.js:577:20)
at Buffer.toString (buffer.js:633:10)
at prn_to_sheet (/Users/ctsuser1/node_modules/xlsx/xlsx.js:6309:27)
at Object.prn_to_workbook [as to_workbook] (/Users/ctsuser1/node_modules/xlsx/xlsx.js:6320:63)
at read_prn (/Users/ctsuser1/node_modules/xlsx/xlsx.js:18727:13)
at readSync (/Users/ctsuser1/node_modules/xlsx/xlsx.js:18754:9)
at Object.readFileSync (/Users/ctsuser1/node_modules/xlsx/xlsx.js:18759:9)
at Object.<anonymous> (/Users/ctsuser1/Downloads/nyc-parking-tickets/fils/hana.js:13:21)
at Module._compile (module.js:635:30)
at Object.Module._extensions..js (module.js:646:10)
Looking for some urgent help and thanks in advance for the help.