Hadoop Learning: Serialization and Deserialization of the int Type (day07)
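The example below serializes an IntWritable to a byte array through a DataOutputStream, prints the raw bytes, and then reads the value back two ways: with readFields() on a second IntWritable and directly with DataInputStream.readInt().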

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.util.Arrays;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.ShortWritable;
import org.junit.Test;

public class TestWritable {

    /*
     * test writable
     */
    @Test
    public void test1() throws Exception {
        IntWritable age = new IntWritable();
        age.set(163);

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dataOut = new DataOutputStream(baos);

        // Serialize: IntWritable writes its value as a 4-byte big-endian int.
        age.write(dataOut);
        dataOut.close();

        byte[] bts = baos.toByteArray();
        for (byte by : bts) {
            System.out.println(by);
        }
        System.out.println(Arrays.toString(bts));

        // Deserialize with readFields().
        IntWritable age2 = new IntWritable();
        age2.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
        System.out.println(age2.get());

        // Reading the same bytes directly with DataInputStream.readInt() yields the same value.
        ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
        DataInputStream dis = new DataInputStream(bais);
        System.out.println(dis.readInt());

        // Test reading the serialized bytes back as two short values.
        // Note: dis is already exhausted by readInt() above, so a fresh
        // DataInputStream over baos.toByteArray() would be needed here.
        // ShortWritable short1 = new ShortWritable();
        // short1.readFields(dis);
        // System.out.println(short1.get());
        // ShortWritable short2 = new ShortWritable();
        // short2.readFields(dis);
        // System.out.println(short2.get());
    }
}
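The same round-trip pattern works for any Writable, because the interface only requires write(DataOutput) and readFields(DataInput). Below is a minimal sketch of a reusable helper under that assumption; the class and method names (WritableRoundTrip, serialize, deserialize) are illustrative and not part of Hadoop's API.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;

// Illustrative helper; serialize/deserialize are not Hadoop API methods.
public class WritableRoundTrip {

    // Serialize any Writable into a byte array via a DataOutputStream.
    public static byte[] serialize(Writable w) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(baos);
        w.write(out);
        out.close();
        return baos.toByteArray();
    }

    // Populate an existing Writable instance from a byte array.
    public static void deserialize(Writable w, byte[] bytes) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
        w.readFields(in);
        in.close();
    }

    public static void main(String[] args) throws IOException {
        IntWritable age = new IntWritable(163);
        // IntWritable serializes as a 4-byte big-endian int,
        // so 163 comes out as the bytes 0, 0, 0, -93 (0xA3 as a signed byte).
        byte[] bytes = serialize(age);
        IntWritable copy = new IntWritable();
        deserialize(copy, bytes);
        System.out.println(copy.get()); // 163
    }
}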
