Failed to run the Spark example locally on a MacBook with error "Lost task 1.0 in stage 0.0"


I installed Spark and ran "run-example SparkPi 10", which fails with the output below. Running "spark-submit examples/src/main/python/pi.py 10" produces a similar error.
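For context, the Python version of the example does nothing unusual; here is a minimal sketch of the Monte Carlo Pi estimate it performs (paraphrased, not the verbatim bundled pi.py):

    # Sketch of what examples/src/main/python/pi.py computes (Spark 1.1.0 era);
    # paraphrased, not the verbatim bundled script.
    import sys
    from random import random
    from operator import add

    from pyspark import SparkContext

    sc = SparkContext(appName="PythonPi")
    partitions = int(sys.argv[1]) if len(sys.argv) > 1 else 2
    n = 100000 * partitions

    def inside(_):
        # Sample a point in the [-1, 1] x [-1, 1] square; count it if it
        # falls inside the unit circle.
        x = random() * 2 - 1
        y = random() * 2 - 1
        return 1 if x ** 2 + y ** 2 < 1 else 0

    count = sc.parallelize(range(1, n + 1), partitions).map(inside).reduce(add)
    print("Pi is roughly %f" % (4.0 * count / n))
    sc.stop()

Here is the output from the SparkPi run: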

14/11/19 17:08:04 INFO Executor: Running task 2.0 in stage 0.0 (TID 2)
14/11/19 17:08:04 INFO Executor: Running task 3.0 in stage 0.0 (TID 3)
14/11/19 17:08:04 INFO Executor: Running task 1.0 in stage 0.0 (TID 1)
14/11/19 17:08:04 INFO Executor: Running task 0.0 in stage 0.0 (TID 0)
14/11/19 17:08:04 INFO Executor: Running task 5.0 in stage 0.0 (TID 5)
14/11/19 17:08:04 INFO Executor: Running task 6.0 in stage 0.0 (TID 6)
14/11/19 17:08:04 INFO Executor: Running task 4.0 in stage 0.0 (TID 4)
14/11/19 17:08:04 INFO Executor: Running task 7.0 in stage 0.0 (TID 7)
14/11/19 17:08:04 INFO Executor: Fetching http://192.168.1.80:57278/jars/spark-examples-1.1.0-hadoop2.4.0.jar with timestamp 1416388083980
14/11/19 17:08:04 INFO Utils: Fetching http://192.168.1.80:57278/jars/spark-examples-1.1.0-hadoop2.4.0.jar to /var/folders/6k/nww6s1p52yg424zdcckvpwvc0000gn/T/fetchFileTemp6287870778953166340.tmp
14/11/19 17:09:04 INFO Executor: Fetching http://192.168.1.80:57278/jars/spark-examples-1.1.0-hadoop2.4.0.jar with timestamp 1416388083980
14/11/19 17:09:04 INFO Utils: Fetching http://192.168.1.80:57278/jars/spark-examples-1.1.0-hadoop2.4.0.jar to /var/folders/6k/nww6s1p52yg424zdcckvpwvc0000gn/T/fetchFileTemp6122384738311225749.tmp
**14/11/19 17:09:04 ERROR Executor: Exception in task 1.0 in stage 0.0 (TID 1)
java.net.SocketTimeoutException: Read timed out**
    at java.net.SocketInputStream.socketRead0(Native Method)
    at java.net.SocketInputStream.read(SocketInputStream.java:150)
    at java.net.SocketInputStream.read(SocketInputStream.java:121)
    at java.io.BufferedInputStream.fill(BufferedInputStream.java:246)
    at java.io.BufferedInputStream.read1(BufferedInputStream.java:286)
    at java.io.BufferedInputStream.read(BufferedInputStream.java:345)
    at sun.net.www.http.HttpClient.parseHTTPHeader(HttpClient.java:703)
    at sun.net.www.http.HttpClient.parseHTTP(HttpClient.java:647)
    at sun.net.www.protocol.http.HttpURLConnection.getInputStream0(HttpURLConnection.java:1534)
    at sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1439)
    at org.apache.spark.util.Utils$.fetchFile(Utils.scala:376)
    at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:325)
    at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:323)
    at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:772)
    at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
    at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
    at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:226)
    at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:39)
    at scala.collection.mutable.HashMap.foreach(HashMap.scala:98)
    at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:771)
    at org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:323)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:158)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
14/11/19 17:09:04 INFO TaskSetManager: Starting task 8.0 in stage 0.0 (TID 8, localhost, PROCESS_LOCAL, 1229 bytes)
**14/11/19 17:09:04 INFO Executor: Running task 8.0 in stage 0.0 (TID 8)
14/11/19 17:09:04 WARN TaskSetManager: Lost task 1.0 in stage 0.0 (TID 1, localhost): java.net.SocketTimeoutException: Read timed out**
        java.net.SocketInputStream.socketRead0(Native Method)
        java.net.SocketInputStream.read(SocketInputStream.java:150)
        java.net.SocketInputStream.read(SocketInputStream.java:121)
        java.io.BufferedInputStream.fill(BufferedInputStream.java:246)
        java.io.BufferedInputStream.read1(BufferedInputStream.java:286)
        java.io.BufferedInputStream.read(BufferedInputStream.java:345)
        sun.net.www.http.HttpClient.parseHTTPHeader(HttpClient.java:703)
        sun.net.www.http.HttpClient.parseHTTP(HttpClient.java:647)
        sun.net.www.protocol.http.HttpURLConnection.getInputStream0(HttpURLConnection.java:1534)
        sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1439)
        org.apache.spark.util.Utils$.fetchFile(Utils.scala:376)
        org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:325)
        org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:323)
        scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:772)
        scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
        scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
        scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:226)
        scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:39)
        scala.collection.mutable.HashMap.foreach(HashMap.scala:98)
        scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:771)
        org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:323)
        org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:158)
        java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        java.lang.Thread.run(Thread.java:745)
14/11/19 17:09:04 ERROR TaskSetManager: Task 1 in stage 0.0 failed 1 times; aborting job
14/11/19 17:09:04 INFO TaskSchedulerImpl: Cancelling stage 0
14/11/19 17:09:04 INFO Executor: Executor is trying to kill task 0.0 in stage 0.0 (TID 0)
14/11/19 17:09:04 INFO TaskSchedulerImpl: Stage 0 was cancelled
14/11/19 17:09:04 INFO Executor: Executor is trying to kill task 5.0 in stage 0.0 (TID 5)
14/11/19 17:09:04 INFO Executor: Executor is trying to kill task 2.0 in stage 0.0 (TID 2)
14/11/19 17:09:04 INFO Executor: Executor is trying to kill task 6.0 in stage 0.0 (TID 6)
14/11/19 17:09:04 INFO Executor: Executor is trying to kill task 3.0 in stage 0.0 (TID 3)
14/11/19 17:09:04 INFO Executor: Executor is trying to kill task 7.0 in stage 0.0 (TID 7)
14/11/19 17:09:04 INFO Executor: Executor is trying to kill task 4.0 in stage 0.0 (TID 4)
14/11/19 17:09:04 INFO Executor: Executor is trying to kill task 8.0 in stage 0.0 (TID 8)
14/11/19 17:09:04 INFO DAGScheduler: Failed to run reduce at SparkPi.scala:35
**Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 0.0 failed 1 times, most recent failure: Lost task 1.0 in stage 0.0 (TID 1, localhost): java.net.SocketTimeoutException: Read timed out**
        java.net.SocketInputStream.socketRead0(Native Method)
        java.net.SocketInputStream.read(SocketInputStream.java:150)
        java.net.SocketInputStream.read(SocketInputStream.java:121)
        java.io.BufferedInputStream.fill(BufferedInputStream.java:246)
        java.io.BufferedInputStream.read1(BufferedInputStream.java:286)
        java.io.BufferedInputStream.read(BufferedInputStream.java:345)
        sun.net.www.http.HttpClient.parseHTTPHeader(HttpClient.java:703)
        sun.net.www.http.HttpClient.parseHTTP(HttpClient.java:647)
        sun.net.www.protocol.http.HttpURLConnection.getInputStream0(HttpURLConnection.java:1534)
        sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1439)
        org.apache.spark.util.Utils$.fetchFile(Utils.scala:376)
        org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:325)
        org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:323)
        scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:772)
        scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
        scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
        scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:226)
        scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:39)
        scala.collection.mutable.HashMap.foreach(HashMap.scala:98)
        scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:771)
        org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:323)
        org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:158)
        java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        java.lang.Thread.run(Thread.java:745)
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1185)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1174)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1173)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1173)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
    at scala.Option.foreach(Option.scala:236)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:688)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1391)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
    at akka.actor.ActorCell.invoke(ActorCell.scala:456)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
    at akka.dispatch.Mailbox.run(Mailbox.scala:219)
    at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
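From the timestamps, the executor blocks for exactly one minute (17:08:04 to 17:09:04) while fetching the examples jar over HTTP from the driver's embedded file server at 192.168.1.80:57278, and then the read times out. As a sanity check, something like the following can re-try the same fetch by hand; note the port is ephemeral and changes on every run, so it has to be copied from a live run's log first:

    # Manual re-try of the jar fetch that times out in the log.
    # NOTE: the URL below is copied from one run's output; the port (57278)
    # is ephemeral and will differ on every run, so paste in the current
    # value before using this. Python 2 stdlib, matching the era.
    import urllib2

    url = "http://192.168.1.80:57278/jars/spark-examples-1.1.0-hadoop2.4.0.jar"
    try:
        resp = urllib2.urlopen(url, timeout=15)
        data = resp.read(1024)  # just the first KB, to prove the read works
        print("fetched %d bytes, HTTP %d" % (len(data), resp.getcode()))
    except Exception as e:
        print("fetch failed: %r" % e)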

I googled around but found no hints. Any help? Thank you :)

Thanks, Chao

