apache-spark, apache-atlas

Can't set up Spark application with spark-atlas-connector


I can't set up my Spark application with Apache Atlas via the spark-atlas-connector.

I cloned the https://github.com/hortonworks-spark/spark-atlas-connector project and ran mvn package. Then I put all the JARs on my project's classpath and set up the config like this:

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf()
      .setAppName("atlas-test")
      .setMaster("local[2]")
      // register the spark-atlas-connector trackers as Spark listeners
      .set("spark.extraListeners", "com.hortonworks.spark.atlas.SparkAtlasEventTracker")
      .set("spark.sql.queryExecutionListeners", "com.hortonworks.spark.atlas.SparkAtlasEventTracker")
      .set("spark.sql.streaming.streamingQueryListeners", "com.hortonworks.spark.atlas.SparkAtlasStreamingQueryEventTracker")

    val spark = SparkSession.builder()
      .config(sparkConf)
      .enableHiveSupport()
      .getOrCreate()

    // batch read from the "foobar" topic (BROKER_SERVERS is defined elsewhere)
    val df = spark.read.format("kafka")
      .option("kafka.bootstrap.servers", BROKER_SERVERS)
      .option("subscribe", "foobar")
      .option("startingOffsets", "earliest") // note: the option name is "startingOffsets", plural
      .load()

    df.show()

    // batch write to the "foobar-out" topic
    df.write
      .format("kafka")
      .option("kafka.bootstrap.servers", BROKER_SERVERS)
      .option("topic", "foobar-out")
      .save()
  }

Atlas is started from a Docker container that I pulled. Kafka and ZooKeeper are started from Docker containers I pulled as well.

The job works fine without the spark-atlas-connector, but as soon as I add the connector it throws the exception below.

19/08/09 16:40:16 ERROR SparkContext: Error initializing SparkContext.
org.apache.spark.SparkException: Exception when registering SparkListener
    at org.apache.spark.SparkContext.setupAndStartListenerBus(SparkContext.scala:2398)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:555)
    at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
    at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:935)
    at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:926)
    at scala.Option.getOrElse(Option.scala:121)
    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:926)
    at Boot$.main(Boot.scala:21)
    at Boot.main(Boot.scala)
Caused by: org.apache.atlas.AtlasException: Failed to load application properties
    at org.apache.atlas.ApplicationProperties.get(ApplicationProperties.java:134)
    at org.apache.atlas.ApplicationProperties.get(ApplicationProperties.java:86)
    at com.hortonworks.spark.atlas.AtlasClientConf.configuration$lzycompute(AtlasClientConf.scala:25)
    at com.hortonworks.spark.atlas.AtlasClientConf.configuration(AtlasClientConf.scala:25)
    at com.hortonworks.spark.atlas.AtlasClientConf.get(AtlasClientConf.scala:50)
    at com.hortonworks.spark.atlas.AtlasClient$.atlasClient(AtlasClient.scala:120)
    at com.hortonworks.spark.atlas.SparkAtlasEventTracker.<init>(SparkAtlasEventTracker.scala:33)
    at com.hortonworks.spark.atlas.SparkAtlasEventTracker.<init>(SparkAtlasEventTracker.scala:37)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.spark.util.Utils$$anonfun$loadExtensions$1.apply(Utils.scala:2691)
    at org.apache.spark.util.Utils$$anonfun$loadExtensions$1.apply(Utils.scala:2680)
    at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
    at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)
    at scala.collection.AbstractTraversable.flatMap(Traversable.scala:104)
    at org.apache.spark.util.Utils$.loadExtensions(Utils.scala:2680)
    at org.apache.spark.SparkContext$$anonfun$setupAndStartListenerBus$1.apply(SparkContext.scala:2387)
    at org.apache.spark.SparkContext$$anonfun$setupAndStartListenerBus$1.apply(SparkContext.scala:2386)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.SparkContext.setupAndStartListenerBus(SparkContext.scala:2386)
    ... 8 more
Caused by: com.hortonworks.spark.atlas.shade.org.apache.commons.configuration.ConfigurationException: Cannot locate configuration source null
    at com.hortonworks.spark.atlas.shade.org.apache.commons.configuration.AbstractFileConfiguration.load(AbstractFileConfiguration.java:259)
    at com.hortonworks.spark.atlas.shade.org.apache.commons.configuration.AbstractFileConfiguration.load(AbstractFileConfiguration.java:238)
    at com.hortonworks.spark.atlas.shade.org.apache.commons.configuration.AbstractFileConfiguration.<init>(AbstractFileConfiguration.java:197)
    at com.hortonworks.spark.atlas.shade.org.apache.commons.configuration.PropertiesConfiguration.<init>(PropertiesConfiguration.java:284)
    at org.apache.atlas.ApplicationProperties.<init>(ApplicationProperties.java:69)
    at org.apache.atlas.ApplicationProperties.get(ApplicationProperties.java:125)
    ... 32 more
19/08/09 16:40:16 INFO SparkContext: SparkContext already stopped.
Exception in thread "main" org.apache.spark.SparkException: Exception when registering SparkListener
    ... (same stack trace as above)
19/08/09 16:40:17 INFO ShutdownHookManager: Shutdown hook called

Solution

  • I believe you have missed one step from the setup documentation. The error stems from:

    Caused by: com.hortonworks.spark.atlas.shade.org.apache.commons.configuration.ConfigurationException: Cannot locate configuration source null
    

    And to quote the README file in the GitHub repo you've linked:

    Also make sure atlas configuration file atlas-application.properties is in the Driver's classpath. For example, putting this file into <SPARK_HOME>/conf.
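    For reference, a minimal atlas-application.properties could look like this. This is only a sketch, assuming a local Atlas instance on its default port 21000; atlas.client.type and atlas.rest.address are standard Atlas client properties, but adjust the address and client type to your own setup:

    # "rest" talks to the Atlas REST API directly;
    # the default "kafka" publishes hook messages to the ATLAS_HOOK topic instead
    atlas.client.type=rest
    atlas.rest.address=http://localhost:21000

    Since your job runs with setMaster("local[2]"), putting the file in src/main/resources (so it lands on the application classpath) or into <SPARK_HOME>/conf should both work; with spark-submit you could also point the driver at it explicitly, e.g. via --driver-class-path.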