Problem description:
Hadoop is deployed on an Ubuntu server, and HDFS and YARN have been started.
An error occurs when a local Windows Java client calls the Hadoop API `copyFromLocalFile` to upload a file.
Environment background of the problem and the methods already tried:
The error is shown in the figure below.
Related code:
// Please paste the code as text below (do not replace the code with a picture).
Related code:
package com.shican.hadoop;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class HadoopOne {

    /** Shared HDFS client handle, initialized once when this class is loaded. */
    public static FileSystem fs;

    /** Hadoop configuration key selecting the default filesystem. */
    public final static String HDFS_URI_KEY = "fs.defaultFS";

    /** NameNode URI; the host "shicanLeft" must resolve via the client's hosts file. */
    public final static String HDFS_URI_VALUE = "hdfs://shicanLeft:9000"; //hosts

    static {
        try {
            Configuration conf = new Configuration();
            conf.set(HDFS_URI_KEY, HDFS_URI_VALUE);
            // NOTE(review): from a remote Windows client this connects as the local
            // Windows user name. If the reported failure is an HDFS permission
            // error (AccessControlException), obtain the filesystem with
            // FileSystem.get(URI.create(HDFS_URI_VALUE), conf, "<hdfs-user>")
            // or set the HADOOP_USER_NAME environment variable — confirm against
            // the actual stack trace, which is not included in the question.
            fs = FileSystem.get(conf);
        } catch (IOException e) {
            // Fail class loading loudly instead of swallowing the exception and
            // leaving fs == null, which previously surfaced later as a confusing
            // NullPointerException inside hdfsUpload().
            throw new ExceptionInInitializerError(e);
        }
    }

    /**
     * Uploads the hard-coded Windows log file to the HDFS root directory.
     * Kept for backward compatibility; delegates to the parameterized overload.
     */
    public void hdfsUpload() {
        hdfsUpload("c:/SangforServiceClient_201872.log",
                "/SangforServiceClient_201872.log");
    }

    /**
     * Copies a local file into HDFS.
     *
     * @param localPath path of the source file on the local (Windows) machine
     * @param hdfsPath  destination path inside HDFS
     *                  (I/O failures are printed to stderr; callers see no exception,
     *                  matching the original error-handling contract)
     */
    public void hdfsUpload(String localPath, String hdfsPath) {
        try {
            fs.copyFromLocalFile(new Path(localPath), new Path(hdfsPath));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}