1. 程式人生 > Hadoop windows 本地執行Mapreduce 報錯 Error while running command to get file permissions

Hadoop windows 本地執行Mapreduce 報錯 Error while running command to get file permissions

package cn.hadoop.mr.flowsum;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Driver for the flow-sum MapReduce job: wires up {@code FlowSumMapper} /
 * {@code FlowSumReducer} and submits the job, waiting for completion.
 *
 * <p>Input/output paths may be supplied as {@code args[0]} / {@code args[1]};
 * when absent, the original hard-coded Windows paths are used so existing
 * invocations keep working.
 */
public class FlowSumRunner {
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		Configuration conf = new Configuration();
		Job sumjob = Job.getInstance(conf);

		// Locate the jar to ship to the cluster from this driver class.
		sumjob.setJarByClass(FlowSumRunner.class);

		sumjob.setMapperClass(FlowSumMapper.class);
		sumjob.setReducerClass(FlowSumReducer.class);

		// Map output types are declared explicitly; here they happen to
		// coincide with the final job output types below.
		sumjob.setMapOutputKeyClass(Text.class);
		sumjob.setMapOutputValueClass(FlowBean.class);

		sumjob.setOutputKeyClass(Text.class);
		sumjob.setOutputValueClass(FlowBean.class);

		// Allow paths to be overridden from the command line; fall back to the
		// original hard-coded defaults. NOTE(review): on this Windows setup,
		// pointing the input at a concrete file rather than a directory avoided
		// the "Error while running command to get file permissions" failure.
		String input = args.length > 0 ? args[0] : "D:\\tmp\\";
		String output = args.length > 1 ? args[1] : "D:\\tmp\\output";
		FileInputFormat.setInputPaths(sumjob, new Path(input));
		FileOutputFormat.setOutputPath(sumjob, new Path(output));

		// Propagate job success/failure to the process exit code instead of
		// silently discarding waitForCompletion's boolean result.
		System.exit(sumjob.waitForCompletion(true) ? 0 : 1);
	}
}

報錯資訊如下:

SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/E:/ApacheSoftware/apache-hive-1.1.1-bin/apache-hive-1.1.1-bin/lib/hive-jdbc-1.1.1-standalone.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/D:/hadoop-2.6.0/hadoop-2.6.0/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
2016-08-09 17:13:23,574 INFO  [main] Configuration.deprecation (Configuration.java:warnOnceIfDeprecated(1049)) - session.id is deprecated. Instead, use dfs.metrics.session-id
2016-08-09 17:13:23,576 INFO  [main] jvm.JvmMetrics (JvmMetrics.java:init(76)) - Initializing JVM Metrics with processName=JobTracker, sessionId=
2016-08-09 17:13:23,762 WARN  [main] mapreduce.JobSubmitter (JobSubmitter.java:copyAndConfigureFiles(153)) - Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
2016-08-09 17:13:23,764 WARN  [main] mapreduce.JobSubmitter (JobSubmitter.java:copyAndConfigureFiles(261)) - No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
2016-08-09 17:13:23,793 INFO  [main] mapreduce.JobSubmitter (JobSubmitter.java:submitJobInternal(545)) - Cleaning up the staging area file:/tmp/hadoop-Administrator/mapred/staging/Administrator1247505259/.staging/job_local1247505259_0001
Exception in thread "main" java.lang.RuntimeException: Error while running command to get file permissions : ExitCodeException exitCode=-1073741515: 
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:538)
	at org.apache.hadoop.util.Shell.run(Shell.java:455)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:808)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:791)
	at org.apache.hadoop.fs.FileUtil.execCommand(FileUtil.java:1097)
	at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:582)
	at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:557)
	at org.apache.hadoop.fs.LocatedFileStatus.<init>(LocatedFileStatus.java:42)
	at org.apache.hadoop.fs.FileSystem$4.next(FileSystem.java:1699)
	at org.apache.hadoop.fs.FileSystem$4.next(FileSystem.java:1681)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:303)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.listStatus(FileInputFormat.java:264)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:385)
	at org.apache.hadoop.mapreduce.JobSubmitter.writeNewSplits(JobSubmitter.java:597)
	at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:614)
	at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:492)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Unknown Source)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
	at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1314)
	at cn.hadoop.mr.flowsum.FlowSumRunner.main(FlowSumRunner.java:30)

	at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:620)
	at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:557)
	at org.apache.hadoop.fs.LocatedFileStatus.<init>(LocatedFileStatus.java:42)
	at org.apache.hadoop.fs.FileSystem$4.next(FileSystem.java:1699)
	at org.apache.hadoop.fs.FileSystem$4.next(FileSystem.java:1681)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:303)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.listStatus(FileInputFormat.java:264)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:385)
	at org.apache.hadoop.mapreduce.JobSubmitter.writeNewSplits(JobSubmitter.java:597)
	at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:614)
	at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:492)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Unknown Source)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
	at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1314)
	at cn.hadoop.mr.flowsum.FlowSumRunner.main(FlowSumRunner.java:30)

解決方案:

報錯的直接現象是 Windows 下 FileInputFormat.setInputPaths 裡面寫目錄路徑會觸發該錯誤,改成具體的檔案後就不報錯了。補充說明:exitCode=-1073741515 即 0xC0000135(STATUS_DLL_NOT_FOUND),通常表示 Hadoop 在 Windows 上執行本地命令時缺少原生依賴(如 winutils.exe / hadoop.dll,或其依賴的 MSVC 執行庫);更根本的解法是安裝對應版本的 winutils 並正確設定 HADOOP_HOME,改用具體檔案只是繞過了目錄權限檢查。


FileInputFormat.setInputPaths(sumjob, new Path("D:\\tmp\\"));

改為

FileInputFormat.setInputPaths(sumjob, new Path("D:\\tmp\\HTTP_20130313143750.dat"));