java - Error while deserializing objects of size greater than 95 KB, works fine for objects less than 95 KB
I am getting a StreamCorruptedException when deserializing objects larger than 95 KB; the code works fine for objects smaller than 95 KB. Here is the code:
package hadoop;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HadoopClient {

    static ArrayList<Long> list = new ArrayList<Long>();

    public long readFileFromHdfs(String source) throws IOException {
        Configuration obj = new Configuration();
        obj.set("fs.default.name", "hdfs://127.0.0.1:9000");
        FileSystem fs = FileSystem.get(obj);
        Path sourcePath = new Path(fs.getHomeDirectory() + source + ".txt");
        FSDataInputStream in = fs.open(sourcePath);
        byte[] b = new byte[in.available()];
        final long startTime = System.nanoTime();
        in.read(b, 0, in.available());
        final long endTime = System.nanoTime();
        in.close();
        fs.close();
        TestObject objj = null;
        try {
            ByteArrayInputStream bi = new ByteArrayInputStream(b);
            ObjectInputStream si = new ObjectInputStream(bi);
            objj = (TestObject) si.readObject();
            objj.printHello();
        } catch (Exception e) {
            System.out.println(e);
        }
        return endTime - startTime;
    }

    public long copyBufferToHdfs(byte[] array, String destFileName, boolean replace,
            short replicationFactor, long blockSize, int bufferSize) throws IOException {
        Configuration obj = new Configuration();
        obj.set("fs.default.name", "hdfs://127.0.0.1:9000");
        FileSystem fs = FileSystem.get(obj);
        String s = fs.getHomeDirectory() + destFileName + ".txt";
        Path outFile = new Path(s);
        final long startTime = System.nanoTime();
        FSDataOutputStream out = fs.create(outFile, replace, bufferSize, replicationFactor, blockSize);
        final long endTime = System.nanoTime();
        out.write(array);
        out.close();
        return endTime - startTime;
    }

    public static void main(String[] args) throws Exception {
        HadoopClient hadoopJava = new HadoopClient();
        short replicationFactor;
        long blockSize;
        int bufferSize;
        int noOfBytes;
        int noOfEntries;
        boolean replacement;
        String str = "";
        for (int testCases = 0; testCases < args.length; testCases += 6) {
            blockSize = Integer.parseInt(args[0 + testCases]);
            replicationFactor = Short.parseShort(args[1 + testCases]);
            bufferSize = Integer.parseInt(args[2 + testCases]);
            noOfBytes = Integer.parseInt(args[3 + testCases]);
            noOfEntries = Integer.parseInt(args[4 + testCases]);
            replacement = Boolean.parseBoolean(args[5 + testCases]);
            TestObject testObject = new TestObject();
            testObject.setString(noOfBytes);
            str = hadoopJava.toStringMethod(testObject);
            hadoopJava.publishByteArrayTimer(str.getBytes("windows-1252"), noOfEntries,
                    replacement, replicationFactor, blockSize, bufferSize);
            hadoopJava.retrieveByteArrayTimer(noOfEntries);
            Collections.sort(list);
            for (long ll : list) {
                System.out.println(ll);
            }
            System.out.println("");
        }
    }

    public String toStringMethod(TestObject test) {
        String serializedObject = "";
        try {
            ByteArrayOutputStream bo = new ByteArrayOutputStream();
            ObjectOutputStream so = new ObjectOutputStream(bo);
            so.writeObject(test);
            so.flush();
            so.close();
            serializedObject = bo.toString("windows-1252");
            bo.flush();
            bo.close();
        } catch (Exception e) {
            System.out.println(e);
        }
        return serializedObject;
    }

    public void publishByteArrayTimer(byte[] array, int numberOfInsertions, boolean replace,
            short replicationFactor, long blockSize, int bufferSize)
            throws IOException, InterruptedException {
        long timeTaken = 0;
        for (int fileName = 0; fileName < numberOfInsertions; fileName++) {
            timeTaken = copyBufferToHdfs(array, String.valueOf(fileName), replace,
                    replicationFactor, blockSize, bufferSize);
            list.add(timeTaken / 1000);
            TimeUnit.MICROSECONDS.sleep(10000);
        }
    }

    public void retrieveByteArrayTimer(Integer numberOfInsertions) throws IOException {
        long timeTaken = 0;
        for (int fileName = 0; fileName < numberOfInsertions; fileName++) {
            timeTaken += readFileFromHdfs(String.valueOf(fileName));
        }
    }
}

class TestObject implements Serializable {
    char chars[];
    String str;

    public String setString(int numberOfBytes) {
        numberOfBytes = numberOfBytes / 2;
        chars = new char[numberOfBytes];
        Arrays.fill(chars, 'a');
        str = new String(chars);
        return str;
    }

    public String getString() {
        return str;
    }

    public void printHello() {
        System.out.println("Hello Tester");
    }
}
This is the error trace:
java.io.StreamCorruptedException: invalid type code: 00
Please help.
I am attaching a screenshot of the error trace as well:

The sample input I have given is as follows:
Hi, I am attaching the full stack trace of the error as well:
java.io.StreamCorruptedException: invalid type code: 00
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1946)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1870)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1752)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1328)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:350)
    at hadoop.HadoopClient.readFileFromHdfs(HadoopClient.java:47)
    at hadoop.HadoopClient.retrieveByteArrayTimer(HadoopClient.java:159)
    at hadoop.HadoopClient.main(HadoopClient.java:114)
Please help, thanks.
Most likely the problem is here:
byte[] b = new byte[in.available()];
in.read(b, 0, in.available());
In general, it is a wrong assumption that a single read() call will read all of the data: it may return after filling only part of the buffer, so the rest of your byte array stays zeroed and the deserializer then hits an invalid type code.
You can use this method from Apache commons-io: org.apache.commons.io.IOUtils#toByteArray(java.io.InputStream)

Or read in a loop until end of stream:
ByteArrayOutputStream bos = new ByteArrayOutputStream(in.available());
byte[] buf = new byte[4096 * 16];
int c;
while ((c = in.read(buf)) != -1) {
    bos.write(buf, 0, c);
}
byte[] data = bos.toByteArray();
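Applied to the readFileFromHdfs method from the question, a minimal sketch of the fix could look like the following. This is only an illustration: it assumes commons-io is on the classpath and reuses the imports and the TestObject class exactly as they appear in the question.

import org.apache.commons.io.IOUtils;

public long readFileFromHdfs(String source) throws IOException {
    Configuration conf = new Configuration();
    conf.set("fs.default.name", "hdfs://127.0.0.1:9000");
    FileSystem fs = FileSystem.get(conf);
    Path sourcePath = new Path(fs.getHomeDirectory() + source + ".txt");
    FSDataInputStream in = fs.open(sourcePath);
    final long startTime = System.nanoTime();
    // Read until end of stream instead of sizing the buffer with available()
    // and hoping that a single read() fills it completely.
    byte[] b = IOUtils.toByteArray(in);
    final long endTime = System.nanoTime();
    in.close();
    fs.close();
    try {
        ObjectInputStream si = new ObjectInputStream(new ByteArrayInputStream(b));
        TestObject objj = (TestObject) si.readObject();
        objj.printHello();
    } catch (Exception e) {
        System.out.println(e);
    }
    return endTime - startTime;
}

With the complete byte stream in hand, ObjectInputStream sees the whole serialized object, and the StreamCorruptedException should no longer occur for larger files.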