package com.Main;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
public class Main {

    /**
     * Copies a single file from the local file system into HDFS.
     *
     * <p>Usage: {@code Main <localSrc> <hdfsDst>} where {@code localSrc} is a path
     * on the local file system and {@code hdfsDst} is the destination URI/path in
     * HDFS (e.g. {@code hdfs://namenode:8020/data/file.json}).
     *
     * @param args args[0] = local source file path, args[1] = HDFS destination path
     * @throws IOException if the local file cannot be read or the HDFS write fails
     */
    public static void main(String[] args) throws IOException {
        // Guard against missing CLI arguments: without this, running the jar with
        // no args throws ArrayIndexOutOfBoundsException at args[0]/args[1].
        if (args.length < 2) {
            System.err.println("Usage: Main <localSrc> <hdfsDst>");
            System.exit(1);
        }
        // Source file in the local file system
        String localSrc = args[0];
        // Destination file in HDFS
        String dst = args[1];

        // Load the Hadoop configuration (core-site.xml etc. from the classpath)
        Configuration conf = new Configuration();
        System.out.println("Connecting to -- " + conf.get("fs.defaultFS"));

        // try-with-resources guarantees the local input stream is closed even if
        // FileSystem.get or fs.create throws before the copy starts (the original
        // code leaked the stream on that path).
        try (InputStream in = new BufferedInputStream(new FileInputStream(localSrc))) {
            FileSystem fs = FileSystem.get(URI.create(dst), conf);
            OutputStream out = fs.create(new Path(dst));
            // copyBytes with close=true closes both streams when the copy finishes;
            // the extra close from try-with-resources is a harmless no-op.
            IOUtils.copyBytes(in, out, 4096, true);
        }
        System.out.println(dst + " copied to HDFS");
    }
}
/*
 * (Pasted Stack Overflow thread, translated from Chinese; kept as a comment so
 * it no longer breaks compilation:)
 *
 * Q: I get the following error message: Exception in thread "main"
 *    java.lang.ArrayIndexOutOfBoundsException: 0 at com.Main.Main.main(Main.java:22)
 *    when copying a JSON flat file from local to HDFS.
 *    My JSON file is local and must be moved to HDFS, e.g.:
 *    {"delete":"Ef77xvP","time":1509073785106}, {"delete":"2YXsF7r","time":1509073795109}
 *
 * Comment: How are you running this? Are you supplying the arguments to the JAR
 *          at runtime? – philantrovert
 * Comment: 'hdfs dfs -put file.json' ... it's not that hard –
 */