import java.io.FileOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
public class seek {
    /**
     * Demonstrates the seek operation through the HDFS FileSystem API.
     *
     * @author Administrator
     *
     * [hadoop@hadoop01 ~]$ hdfs dfs -chmod -R a+w /user/hadoop
     * [hadoop@hadoop01 ~]$ hdfs dfs -ls -R /
     */
    @Test
    public void seekfile() throws Exception {
        // Create the Configuration object. It is loaded in a default order:
        // core-default.xml first, then the configuration files on the classpath
        // (the src directory), which is what we rely on here.
        Configuration conf = new Configuration();
        // Obtain the FileSystem from the Configuration. If fs.defaultFS is not
        // set, this falls back to the local file system.
        FileSystem fs = FileSystem.get(conf);
        // Define the file URL as a string.
        String file = "hdfs://hadoop01:9000/user/hadoop/data2/kaola.jpg";
        // Build a Path object from the URL string.
        Path path = new Path(file);
        FSDataInputStream in = fs.open(path);
        // Copy the stream to a local file; keep the input stream open.
        IOUtils.copyBytes(in, new FileOutputStream("E:/kaola1.jpg"), 1024, false);
        // Seek back to the start of the file. Only FSDataInputStream implements
        // the Seekable interface; FSDataOutputStream does not.
        in.seek(0);
        IOUtils.copyBytes(in, new FileOutputStream("E:/kaola2.jpg"), 1024, true);
    }
}
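Besides seek, FSDataInputStream supports positioned reads (the PositionedReadable interface), which fetch bytes at a given offset without moving the stream's current read position, so a seek(0) is not needed when you only want to peek at part of the file. Below is a minimal sketch reusing the hdfs://hadoop01:9000 cluster and kaola.jpg path from the test above; the class name PositionedRead, the method name readAtOffset, and the 16-byte buffer are illustrative assumptions, not part of the original example.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

public class PositionedRead {
    @Test
    public void readAtOffset() throws Exception {
        Configuration conf = new Configuration();
        String file = "hdfs://hadoop01:9000/user/hadoop/data2/kaola.jpg";
        // Bind the FileSystem to the file's URI so the test does not depend on fs.defaultFS.
        FileSystem fs = FileSystem.get(URI.create(file), conf);
        FSDataInputStream in = fs.open(new Path(file));
        try {
            byte[] header = new byte[16];
            // Positioned read: fills the buffer from offset 0 without moving
            // the stream's current position (PositionedReadable#readFully).
            in.readFully(0, header);
            System.out.println("read " + header.length + " bytes from offset 0");
        } finally {
            IOUtils.closeStream(in);
        }
    }
}

Because a positioned read does not disturb the main read pointer, it can be mixed with the seek-based copying shown in the test above on the same open stream.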