Saturday, April 13, 2013

Write a File to HDFS and Read a File Back from HDFS (Hadoop) Using Java

 

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;

/**
*
* @author Shashwat Shriparv
* @email dwivedishashwat@gmail.com
* @Web helpmetocode.blogspot.com
*/
public class WritetoHDFSReadFromHDFSWritToLocal {
private static byte[] buffer;
private static int bytesRead;

public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {

FileSystem fs =new DistributedFileSystem();

fs.initialize(new URI("hdfs://master1:9000/"), new Configuration());

final File folder = new File("C:\\Shared\\files");
for (final File fileEntry : folder.listFiles()) {
if (fileEntry.isDirectory()) {
readallfilefromfolder(fileEntry);
} else {
fs.copyFromLocalFile(new Path("C:\\shashwat\\files"+fileEntry.getName()),new Path("/Test/"));
System.out.println(fileEntry.getName());
fs.copyToLocalFile(new Path("/Test/"+fileEntry.getName()),new Path("d:\\shashwat\\"));
}
}

//fs.copyFromLocalFile(new Path("
C:\\Shared\\HadoopLibs"),new Path("/Test/1.jpg"));

System.out.println("
Done");
}
public static void readallfilefromfolder(final File folder) {
for (final File fileEntry : folder.listFiles()) {
if (fileEntry.isDirectory()) {
readallfilefromfolder(fileEntry);
} else {
System.out.println(fileEntry.getName());
}
}
}
}


Note: Before writing to HDFS, create the target directory and change its permissions to 777 to avoid permission-related security exceptions.

No comments:

Post a Comment

Live

Your Ad Here