
Error when inserting Spark data into MySQL

玉溪 posted on 2016-10-10 01:25:54 (2 replies · 11883 views)
import java.util.Properties

import org.apache.spark.sql.SaveMode
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}

object to_mysql {
  def main(args: Array[String]) {
    // Spark config keys use dots: "spark.executor.memory"
    val conf = new SparkConf()
      .setAppName("sparkToMysql")
      .setMaster("spark://master:7077")
      .set("spark.executor.memory", "1g")
    val sc = new SparkContext(conf)
    val hiveContext = new HiveContext(sc)
    val sql = "select t_date,stock_code,open_price,high_price,close_price,low_price,volume,price_change,p_change,ma5,ma10,ma20,v_ma5,v_ma10,v_ma20,turnover,deal_time from tw_stock_d"
    // DataFrameWriter.jdbc returns Unit, so nothing useful can be bound to a val;
    // the MySQL credentials are passed inline in the JDBC URL
    hiveContext.sql(sql).write.mode(SaveMode.Append)
      .jdbc("jdbc:mysql://192.168.1.33:3306/stock?user=root&password=password", "tw_stock_d", new Properties)
    sc.stop()
  }
}

Error output:

16/10/09 13:27:08 INFO storage.BlockManagerInfo: Added broadcast_0_piece0 in memory on slave1:52829 (size: 42.0 KB, free: 1247.5 MB)
16/10/09 13:27:09 INFO storage.BlockManagerInfo: Added broadcast_0_piece0 in memory on slave3:41419 (size: 42.0 KB, free: 1247.5 MB)
16/10/09 13:27:15 INFO scheduler.TaskSetManager: Starting task 4.0 in stage 0.0 (TID 2, slave1, partition 4,NODE_LOCAL, 2287 bytes)
16/10/09 13:27:15 INFO scheduler.TaskSetManager: Finished task 3.0 in stage 0.0 (TID 1) in 13721 ms on slave1 (1/1163)
16/10/09 13:27:16 INFO scheduler.TaskSetManager: Starting task 5.0 in stage 0.0 (TID 3, slave3, partition 5,NODE_LOCAL, 2287 bytes)
16/10/09 13:27:16 WARN scheduler.TaskSetManager: Lost task 2.0 in stage 0.0 (TID 0, slave3): java.sql.SQLException: Access denied for user 'root'@'slave3' (using password: YES)
        at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:1084)
        at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:4232)
        at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:4164)
        at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:926)
        at com.mysql.jdbc.MysqlIO.secureAuth411(MysqlIO.java:4732)
        at com.mysql.jdbc.MysqlIO.doHandshake(MysqlIO.java:1340)
        at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2506)
        at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2539)
        at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2321)
        at com.mysql.jdbc.ConnectionImpl.<init>(ConnectionImpl.java:832)
        at com.mysql.jdbc.JDBC4Connection.<init>(JDBC4Connection.java:46)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
        at com.mysql.jdbc.Util.handleNewInstance(Util.java:409)
        at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:417)
        at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:344)
        at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$createConnectionFactory$2.apply(JdbcUtils.scala:61)
        at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$createConnectionFactory$2.apply(JdbcUtils.scala:52)
        at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.savePartition(JdbcUtils.scala:157)
        at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$saveTable$1.apply(JdbcUtils.scala:277)
        at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$saveTable$1.apply(JdbcUtils.scala:276)
        at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$$anonfun$apply$33.apply(RDD.scala:920)
        at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$$anonfun$apply$33.apply(RDD.scala:920)
        at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858)
        at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
        at org.apache.spark.scheduler.Task.run(Task.scala:89)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
16/10/09 13:27:17 INFO scheduler.TaskSetManager: Lost task 5.0 in stage 0.0 (TID 3) on executor slave3: java.sql.SQLException (Access denied for user 'root'@'slave3' (using password: YES)) [duplicate 1]
... (tasks 2.x and 5.x are retried on slave3 and keep failing with the same exception, duplicates 2 through 6) ...
16/10/09 13:27:18 INFO scheduler.TaskSetManager: Finished task 4.0 in stage 0.0 (TID 2) in 2691 ms on slave1 (2/1163)
16/10/09 13:27:18 ERROR scheduler.TaskSetManager: Task 2 in stage 0.0 failed 4 times; aborting job
16/10/09 13:27:18 INFO scheduler.TaskSchedulerImpl: Cancelling stage 0
16/10/09 13:27:18 INFO scheduler.TaskSchedulerImpl: Stage 0 was cancelled
16/10/09 13:27:18 INFO scheduler.DAGScheduler: ResultStage 0 (jdbc at <console>:35) failed in 17.063 s
16/10/09 13:27:18 INFO scheduler.DAGScheduler: Job 0 failed: jdbc at <console>:35, took 18.350152 s
org.apache.spark.SparkException: Job aborted due to stage failure: Task 2 in stage 0.0 failed 4 times, most recent failure: Lost task 2.3 in stage 0.0 (TID 8, slave3): java.sql.SQLException: Access denied for user 'root'@'slave3' (using password: YES)
        ... (same MySQL driver stack trace as above) ...
Driver stacktrace:
        at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1431)
        at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1418)
        at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:799)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:1929)
        at org.apache.spark.rdd.RDD.foreachPartition(RDD.scala:918)
        at org.apache.spark.sql.DataFrame.foreachPartition(DataFrame.scala:1443)
        at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.saveTable(JdbcUtils.scala:276)
        at org.apache.spark.sql.DataFrameWriter.jdbc(DataFrameWriter.scala:311)
        ... (spark-shell REPL frames elided) ...
Caused by: java.sql.SQLException: Access denied for user 'root'@'slave3' (using password: YES)
        ... (same MySQL driver stack trace as above) ...

I don't know how to fix this. Is it because there are too many MySQL connections? Only a small part of the data gets inserted.
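The exception itself points at authentication rather than connection volume: every failure reads "Access denied for user 'root'@'slave3'", and only tasks on slave3 fail. A quick way to confirm this outside Spark (assuming the mysql client is installed on the worker) is to attempt the same login by hand with the host, user, and password taken from the JDBC URL above:

    # run on slave3; flags mirror the JDBC URL (user=root, password=password, db=stock)
    mysql -h 192.168.1.33 -u root -ppassword stock

If this is also refused with "Access denied", the problem lies entirely in MySQL's grant tables, not in Spark.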

2 replies

qcbb001 posted on 2016-10-10 08:05:30
MySQL is refusing the connection for this user/host pair, 'root'@'slave3', and a password is being sent. Grant that account access and it will work.
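A minimal sketch of such a grant, run as a MySQL admin (MySQL 5.x syntax; 'password' here stands in for whatever password the Spark job actually sends, and the statement can be repeated per worker host or written once with the '%' wildcard):

    -- allow root to connect from host slave3 with the password the JDBC URL sends
    GRANT ALL PRIVILEGES ON stock.* TO 'root'@'slave3' IDENTIFIED BY 'password';
    FLUSH PRIVILEGES;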

玉溪 posted on 2016-10-10 11:22:56
Thank you very much. My root@slave3 had no password set (none of the accounts have passwords by default); I had only granted access for '%'.
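That is consistent with the error: the JDBC URL always sends a password ("using password: YES"), and a passwordless account entry rejects any login that does send one. The existing entries can be inspected directly in the grant tables (the column is named password on MySQL 5.6 and earlier, authentication_string on 5.7 and later):

    -- an empty password field means that account entry is passwordless and will
    -- deny any client that sends a password
    SELECT user, host, password FROM mysql.user;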
