I have a task to read CLOB records from an Oracle 9i database and write them out to files. What I have works for small files, but for larger files of around 200 MB, a single file takes about five hours, which is completely unacceptable. I am sure this can be done faster, but I am not sure how. This is what I have:
//get the Clob record from the database and convert it to String
public String getClobRecord() {
    Connection conn = null;
    PreparedStatement stmt = null;
    try {
        conn = db.prepareConn();
        String sql = "select a.clob_clm from xml_statement a where period_id = ?";
        stmt = conn.prepareStatement(sql);
        stmt.setInt(1, pid);
        ResultSet rset = stmt.executeQuery();
        while (rset.next()) {
            Clob xmlCont = rset.getClob(1);
            xml = ClobStringConversion(xmlCont);
        }
    } catch (SQLException asd) {
        log.fatal(asd.getMessage());
    } catch (IOException asd) {
        log.fatal(asd.getMessage());
    } finally {
        // Close the statement and connection even if the query fails.
        try {
            if (stmt != null) {
                stmt.close();
            }
        } catch (SQLException ignored) {
            // nothing useful to do while cleaning up
        }
        DBConnect.closeConn(conn);
    }
    return xml;
}
//My CLOB to String conversion method
public static String ClobStringConversion(Clob clb) throws SQLException, IOException {
    if (clb == null) {
        return "";
    }
    StringBuilder str = new StringBuilder();
    String strng;
    BufferedReader br = new BufferedReader(clb.getCharacterStream());
    while ((strng = br.readLine()) != null) {
        // readLine() strips the line terminator, so re-append one
        // or the XML collapses onto a single line.
        str.append(strng).append('\n');
    }
    br.close();
    return str.toString();
}
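If downstream code (formatXmlFile() here) really does need the whole document as a String, here is a minimal sketch of a chunked alternative, assuming Clob.length() fits in an int (true at ~200 MB): read the stream in fixed-size chunks and pre-size the builder, rather than paying readLine()'s per-line scanning and the builder's repeated array growth. The name clobToString and the 8 KB buffer are illustrative assumptions, not part of my current code.

import java.io.IOException;
import java.io.Reader;
import java.sql.Clob;
import java.sql.SQLException;

// Sketch: chunked CLOB-to-String conversion with a pre-sized builder.
public static String clobToString(Clob clb) throws SQLException, IOException {
    if (clb == null) {
        return "";
    }
    // Clob.length() is the character count; allocating the builder once
    // avoids repeated internal array copies while appending 200 MB.
    StringBuilder str = new StringBuilder((int) clb.length());
    Reader r = clb.getCharacterStream();
    try {
        char[] buf = new char[8192]; // assumed chunk size
        int n;
        while ((n = r.read(buf)) != -1) {
            str.append(buf, 0, n);
        }
    } finally {
        r.close();
    }
    return str.toString();
}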
//Write the String to file
public void writeXmlToFile(String fileDir, String xmlFileName) {
    File xmlfile = new File(fileDir + "/" + xmlFileName);
    xml = this.getClobRecord();
    try {
        BufferedWriter bw = new BufferedWriter(new FileWriter(xmlfile));
        bw.write(formatXmlFile());
        bw.close();
    } catch (IOException asd) {
        log.fatal(asd.getMessage());
    }
}
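Even better, I suspect, would be to skip the intermediate String entirely and stream the Clob's character stream straight to the file. A minimal sketch, assuming the XML does not have to pass through formatXmlFile() first (clobToFile and the buffer size are hypothetical, not part of my current code):

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.sql.Clob;
import java.sql.SQLException;

// Sketch: copy the CLOB to disk in chunks so memory use stays at the
// buffer size instead of the size of the whole document.
public static void clobToFile(Clob clb, File target) throws SQLException, IOException {
    Reader in = clb.getCharacterStream();
    Writer out = new BufferedWriter(new FileWriter(target));
    try {
        char[] buf = new char[8192]; // assumed chunk size
        int n;
        while ((n = in.read(buf)) != -1) {
            out.write(buf, 0, n); // write exactly what was read
        }
    } finally {
        out.close();
        in.close();
    }
}

The ResultSet loop in getClobRecord() could then call this directly with the target File, so nothing the size of the whole document is ever held in memory.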
What exactly should I change to accommodate very large CLOB files?

EDIT: A smaller sample of the resulting file: