使用sqoop將mysql 資料匯入hdfs時各種報錯
資訊
-
18/06/29 10:45:08 ERROR sqoop.Sqoop: Got exception running Sqoop: java.lang.RuntimeException: java.lang.RuntimeException: java.sql.SQLException: The connection property 'zeroDateTimeBehavior' acceptable values are: 'CONVERT_TO_NULL', 'EXCEPTION' or 'ROUND'. The value 'convertToNull' is not acceptable.
-
java.lang.RuntimeException: java.lang.RuntimeException: java.sql.SQLException: The connection property 'zeroDateTimeBehavior' acceptable values are: 'CONVERT_TO_NULL', 'EXCEPTION' or 'ROUND'. The value 'convertToNull' is not acceptable.
-
at org.apache.sqoop.mapreduce.db.DBInputFormat.setDbConf(DBInputFormat.java:170)
-
at org.apache.sqoop.mapreduce.db.DBInputFormat.setConf(DBInputFormat.java:161)
-
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
-
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
-
at org.apache.hadoop.mapreduce.JobSubmitter.writeNewSplits(JobSubmitter.java:299)
-
at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:318)
-
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:196)
-
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1290)
-
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1287)
-
at java.security.AccessController.doPrivileged(Native Method)
-
at javax.security.auth.Subject.doAs(Subject.java:422)
-
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1758)
-
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1287)
-
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1308)
-
at org.apache.sqoop.mapreduce.ImportJobBase.doSubmitJob(ImportJobBase.java:200)
-
at org.apache.sqoop.mapreduce.ImportJobBase.runJob(ImportJobBase.java:173)
-
at org.apache.sqoop.mapreduce.ImportJobBase.runImport(ImportJobBase.java:270)
-
at org.apache.sqoop.manager.SqlManager.importTable(SqlManager.java:692)
-
at org.apache.sqoop.manager.MySQLManager.importTable(MySQLManager.java:127)
-
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:520)
-
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:628)
-
at org.apache.sqoop.Sqoop.run(Sqoop.java:147)
-
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
-
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
-
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
-
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:243)
-
at org.apache.sqoop.Sqoop.main(Sqoop.java:252)
-
Caused by: java.lang.RuntimeException: java.sql.SQLException: The connection property 'zeroDateTimeBehavior' acceptable values are: 'CONVERT_TO_NULL', 'EXCEPTION' or 'ROUND'. The value 'convertToNull' is not acceptable.
-
at org.apache.sqoop.mapreduce.db.DBInputFormat.getConnection(DBInputFormat.java:223)
-
at org.apache.sqoop.mapreduce.db.DBInputFormat.setDbConf(DBInputFormat.java:168)
-
... 26 more
-
Caused by: java.sql.SQLException: The connection property 'zeroDateTimeBehavior' acceptable values are: 'CONVERT_TO_NULL', 'EXCEPTION' or 'ROUND'. The value 'convertToNull' is not acceptable.
-
at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:127)
-
at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:95)
-
at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:87)
-
at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:61)
-
at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:71)
-
at com.mysql.cj.jdbc.exceptions.SQLExceptionsMapping.translateException(SQLExceptionsMapping.java:85)
-
at com.mysql.cj.jdbc.ConnectionImpl.<init>(ConnectionImpl.java:440)
-
at com.mysql.cj.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:230)
-
at com.mysql.cj.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:226)
-
at java.sql.DriverManager.getConnection(DriverManager.java:664)
-
at java.sql.DriverManager.getConnection(DriverManager.java:247)
-
at org.apache.sqoop.mapreduce.db.DBConfiguration.getConnection(DBConfiguration.java:302)
-
at org.apache.sqoop.mapreduce.db.DBInputFormat.getConnection(DBInputFormat.java:216)
-
... 27 more
-
Caused by: com.mysql.cj.exceptions.WrongArgumentException: The connection property 'zeroDateTimeBehavior' acceptable values are: 'CONVERT_TO_NULL', 'EXCEPTION' or 'ROUND'. The value 'convertToNull' is not acceptable.
-
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
-
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
-
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
-
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
-
at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:59)
-
at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:103)
-
at com.mysql.cj.conf.DefaultPropertySet.initializeProperties(DefaultPropertySet.java:194)
-
at com.mysql.cj.jdbc.ConnectionImpl.<init>(ConnectionImpl.java:382)
-
... 33 more
-
Caused by: com.mysql.cj.exceptions.CJException: The connection property 'zeroDateTimeBehavior' acceptable values are: 'CONVERT_TO_NULL', 'EXCEPTION' or 'ROUND'. The value 'convertToNull' is not acceptable.
-
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
-
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
-
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
-
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
-
at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:59)
-
at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:103)
-
at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:149)
-
at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:122)
-
at com.mysql.cj.conf.EnumPropertyDefinition.parseObject(EnumPropertyDefinition.java:64)
-
at com.mysql.cj.conf.EnumPropertyDefinition.parseObject(EnumPropertyDefinition.java:39)
-
at com.mysql.cj.conf.AbstractRuntimeProperty.setFromString(AbstractRuntimeProperty.java:96)
-
at com.mysql.cj.conf.AbstractRuntimeProperty.initializeFrom(AbstractRuntimeProperty.java:91)
-
at com.mysql.cj.conf.AbstractRuntimeProperty.initializeFrom(AbstractRuntimeProperty.java:75)
-
at com.mysql.cj.conf.DefaultPropertySet.initializeProperties(DefaultPropertySet.java:191)
-
... 34 more
-
Caused by: java.lang.IllegalArgumentException: No enum constant com.mysql.cj.conf.PropertyDefinitions.ZeroDatetimeBehavior.CONVERTTONULL
-
at java.lang.Enum.valueOf(Enum.java:238)
-
at com.mysql.cj.conf.EnumPropertyDefinition.parseObject(EnumPropertyDefinition.java:62)
-
... 39 more
原命令是:
sqoop import --connect jdbc:mysql://localhost:3306/FileManager --username root --password #### --table user
將 FileManager 資料庫中的表 user 匯入到 hdfs
解決方法:在連線 mysql 的 JDBC 連線字串後加入引數
?zeroDateTimeBehavior=EXCEPTION
後來修改為
sqoop import --connect jdbc:mysql://localhost:3306/FileManager?zeroDateTimeBehavior=EXCEPTION --username root --password #### --table user
就可以執行了
問題1:sqoop import匯入時報java.lang.ClassNotFoundException: org.json.JSONObject 錯誤
-
[[email protected] lib]# sqoop import --connect jdbc:mysql://10.1.32.8:3306/test --username sqoop --password sqoop --table t1 -m 1
-
16/06/07 08:48:59 INFO sqoop.Sqoop: Running Sqoop version: 1.4.6-cdh5.7.0
-
16/06/07 08:48:59 WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.
-
16/06/07 08:48:59 INFO manager.MySQLManager: Preparing to use a MySQL streaming resultset.
-
16/06/07 08:48:59 INFO tool.CodeGenTool: Beginning code generation
-
16/06/07 08:48:59 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM `t1` AS t LIMIT 1
-
16/06/07 08:48:59 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM `t1` AS t LIMIT 1
-
16/06/07 08:48:59 INFO orm.CompilationManager: HADOOP_MAPRED_HOME is /opt/cloudera/parcels/CDH/lib/hadoop-mapreduce
-
Note: /tmp/sqoop-root/compile/07751371c513f90a6377d7b482c4a910/t1.java uses or overrides a deprecated API.
-
Note: Recompile with -Xlint:deprecation for details.
-
16/06/07 08:49:01 INFO orm.CompilationManager: Writing jar file: /tmp/sqoop-root/compile/07751371c513f90a6377d7b482c4a910/t1.jar
-
16/06/07 08:49:01 WARN manager.MySQLManager: It looks like you are importing from mysql.
-
16/06/07 08:49:01 WARN manager.MySQLManager: This transfer can be faster! Use the --direct
-
16/06/07 08:49:01 WARN manager.MySQLManager: option to exercise a MySQL-specific fast path.
-
16/06/07 08:49:01 INFO manager.MySQLManager: Setting zero DATETIME behavior to convertToNull (mysql)
-
16/06/07 08:49:01 INFO mapreduce.ImportJobBase: Beginning import of t1
-
Exception in thread "main" java.lang.NoClassDefFoundError: org/json/JSONObject
-
at org.apache.sqoop.util.SqoopJsonUtil.getJsonStringforMap(SqoopJsonUtil.java:42)
-
at org.apache.sqoop.SqoopOptions.writeProperties(SqoopOptions.java:742)
-
at org.apache.sqoop.mapreduce.JobBase.putSqoopOptionsToConfiguration(JobBase.java:369)
-
at org.apache.sqoop.mapreduce.JobBase.createJob(JobBase.java:355)
-
at org.apache.sqoop.mapreduce.ImportJobBase.runImport(ImportJobBase.java:249)
-
at org.apache.sqoop.manager.SqlManager.importTable(SqlManager.java:692)
-
at org.apache.sqoop.manager.MySQLManager.importTable(MySQLManager.java:118)
-
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:497)
-
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:605)
-
at org.apache.sqoop.Sqoop.run(Sqoop.java:143)
-
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
-
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:179)
-
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:218)
-
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:227)
-
at org.apache.sqoop.Sqoop.main(Sqoop.java:236)
-
Caused by: java.lang.ClassNotFoundException: org.json.JSONObject
-
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
-
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
-
at java.security.AccessController.doPrivileged(Native Method)
-
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
-
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
-
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
-
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
-
... 15 more
解決:
這是因為sqoop缺少java-json.jar包.
如下面連結所說:
http://stackoverflow.com/questions/27504508/java-lang-noclassdeffounderror-org-json-jsonobject
The Exception itself says it all: java.lang.ClassNotFoundException: org.json.JSONObject
You have not added the necessary jar file which will be having the org.json.JSONObject class to your classpath.
下載java-json.jar包:
http://www.java2s.com/Code/Jar/j/Downloadjavajsonjar.htm
把java-json.jar新增到../sqoop/lib目錄:
# cp java-json.jar /opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45/lib/sqoop/lib
[[email protected] lib]# pwd
/opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45/lib/sqoop/lib
[[email protected] lib]#
[[email protected] lib]$ ll java-json.jar
-rw-r--r-- 1 root root 84697 Oct 16 2013 java-json.jar
問題2:root使用者寫入HDFS檔案錯誤 "Permission denied: user=root"
-
[[email protected] lib]# sqoop import --connect jdbc:mysql://10.1.32.8:3306/test --username sqoop --password sqoop --table t1 -m 1
-
16/06/07 08:49:50 INFO sqoop.Sqoop: Running Sqoop version: 1.4.6-cdh5.7.0
-
16/06/07 08:49:50 WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.
-
... ...
-
16/06/07 08:49:52 INFO manager.MySQLManager: Setting zero DATETIME behavior to convertToNull (mysql)
-
16/06/07 08:49:52 INFO mapreduce.ImportJobBase: Beginning import of t1
-
16/06/07 08:49:53 INFO Configuration.deprecation: mapred.jar is deprecated. Instead, use mapreduce.job.jar
-
16/06/07 08:49:53 INFO Configuration.deprecation: mapred.map.tasks is deprecated. Instead, use mapreduce.job.maps
-
16/06/07 08:49:53 INFO client.RMProxy: Connecting to ResourceManager at hadoop0.hadoop.com/10.1.32.239:8032
-
16/06/07 08:49:54 WARN security.UserGroupInformation: PriviledgedActionException as:root (auth:SIMPLE) cause:org.apache.hadoop.security.AccessControlException: Permission denied: user=root, access=WRITE, inode="/user":hdfs:supergroup:drwxr-xr-x
-
at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:281)
-
at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:262)
-
at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:242)
-
at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:169)
-
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:152)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6590)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6572)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6524)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4322)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4292)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4265)
-
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:867)
-
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:322)
-
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:603)
-
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
-
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
-
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
-
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)
-
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)
-
at java.security.AccessController.doPrivileged(Native Method)
-
at javax.security.auth.Subject.doAs(Subject.java:415)
-
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
-
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)
-
16/06/07 08:49:54 ERROR tool.ImportTool: Encountered IOException running import job: org.apache.hadoop.security.AccessControlException: Permission denied: user=root, access=WRITE, inode="/user":hdfs:supergroup:drwxr-xr-x
-
at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:281)
-
at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:262)
-
at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:242)
-
at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:169)
-
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:152)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6590)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6572)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6524)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4322)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4292)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4265)
-
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:867)
-
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:322)
-
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:603)
-
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
-
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
-
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
-
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)
-
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)
-
at java.security.AccessController.doPrivileged(Native Method)
-
at javax.security.auth.Subject.doAs(Subject.java:415)
-
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
-
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)
-
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=root, access=WRITE, inode="/user":hdfs:supergroup:drwxr-xr-x
-
at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:281)
-
at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:262)
-
at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:242)
-
at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:169)
-
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:152)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6590)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6572)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6524)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4322)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4292)
-
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4265)
-
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:867)
-
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:322)
-
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:603)
-
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
-
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
-
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
-
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)
-
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)
-
at java.security.AccessController.doPrivileged(Native Method)
-
at javax.security.auth.Subject.doAs(Subject.java:415)
-
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
-
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)
-
at org.apache.hadoop.ipc.Client.call(Client.java:1471)
-
at org.apache.hadoop.ipc.Client.call(Client.java:1408)
-
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
-
at com.sun.proxy.$Proxy15.mkdirs(Unknown Source)
-
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:544)
-
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
-
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
-
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
-
at java.lang.reflect.Method.invoke(Method.java:606)
-
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:256)
-
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
-
at com.sun.proxy.$Proxy16.mkdirs(Unknown Source)
-
at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:3082)
-
... 28 more
解決:
因為在使用sqoop匯入HDFS檔案時,使用的是root使用者,沒有寫hdfs檔案的許可權。
CDH安裝時建立了hdfs使用者,應使用hdfs使用者登入,再進行sqoop匯入
[[email protected] ~]# su - hdfs
[[email protected] ~]$ pwd
/var/lib/hadoop-hdfs
[[email protected] ~]$ ls
t1.java
問題3:其它hadoop節點不連線MySql "is not allowed to connect to this MySQL server"
-
[[email protected] ~]$ sqoop import --connect jdbc:mysql://10.1.32.8:3306/test --username sqoop --password sqoop --table t1 -m 1
-
16/06/07 08:51:52 INFO sqoop.Sqoop: Running Sqoop version: 1.4.6-cdh5.7.0
-
......
-
16/06/07 08:52:00 INFO mapreduce.Job: The url to track the job: http://hadoop0.hadoop.com:8088/proxy/application_1464249387420_0001/
-
16/06/07 08:52:00 INFO mapreduce.Job: Running job: job_1464249387420_0001
-
16/06/07 08:52:08 INFO mapreduce.Job: Job job_1464249387420_0001 running in uber mode : false
-
16/06/07 08:52:08 INFO mapreduce.Job: map 0% reduce 0%
-
16/06/07 08:52:14 INFO mapreduce.Job: Task Id : attempt_1464249387420_0001_m_000000_0, Status : FAILED
-
Error: java.lang.RuntimeException: java.lang.RuntimeException: java.sql.SQLException: null, message from server: "Host 'hadoop4.hadoop.com' is not allowed to connect to this MySQL server"
-
at org.apache.sqoop.mapreduce.db.DBInputFormat.setConf(DBInputFormat.java:167)
-
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:73)
-
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
-
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:749)
-
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
-
at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)
-
at java.security.AccessController.doPrivileged(Native Method)
-
at javax.security.auth.Subject.doAs(Subject.java:415)
-
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
-
at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
-
Caused by: java.lang.RuntimeException: java.sql.SQLException: null, message from server: "Host 'hadoop4.hadoop.com' is not allowed to connect to this MySQL server"
-
at org.apache.sqoop.mapreduce.db.DBInputFormat.getConnection(DBInputFormat.java:220)
-
at org.apache.sqoop.mapreduce.db.DBInputFormat.setConf(DBInputFormat.java:165)
-
... 9 more
-
Caused by: java.sql.SQLException: null, message from server: "Host 'hadoop4.hadoop.com' is not allowed to connect to this MySQL server"
解決:
這是因為客戶端沒有訪問mysql的許可權,修改sqoop使用者的客戶端訪問許可權。
--登入mysql伺服器
[[email protected] ~]# mysql -uroot -proot
mysql> grant all privileges on *.* to 'sqoop'@'%' identified by 'sqoop' with grant option;
Query OK, 0 rows affected (0.00 sec)