Apache Spark - Scala: case class runtime error
This demo ran OK. But when I moved the code into a function of another object (in my former project) and called that function, it failed to compile.
    import org.apache.spark.SparkContext
    import org.bson.BasicBSONObject

    object DfMain {
      case class Person(name: String, age: Double, t: String)

      def main(args: Array[String]): Unit = {
        val sc = new SparkContext("local", "Scala Word Count")
        val sqlContext = new org.apache.spark.sql.SQLContext(sc)
        import sqlContext.implicits._

        val bsonRDD = sc.parallelize(("foo", 1, "female") ::
            ("bar", 2, "male") ::
            ("baz", -1, "female") :: Nil)
          .map { tuple =>
            val bson = new BasicBSONObject()
            bson.put("name", "bfoo")
            bson.put("value", 0.1)
            bson.put("t", "female")
            (null, bson)
          }

        val tdf = bsonRDD.map(_._2)
          .map(f => Person(
            f.get("name").toString,
            f.get("value").toString.toDouble,
            f.get("t").toString))
          .toDF()

        tdf.limit(1).show()
      }
    }
But 'MySqlDao.insertIntoMySql()' in the code below gives a compile error:
    import org.apache.spark.SparkContext
    import org.bson.BasicBSONObject

    object MySqlDao {
      private val sc = new SparkContext("local", "Scala Word Count")
      val sqlContext = new org.apache.spark.sql.SQLContext(sc)
      import sqlContext.implicits._

      case class Person(name: String, age: Double, t: String)

      def insertIntoMySql(): Unit = {
        val bsonRDD = sc.parallelize(("foo", 1, "female") ::
            ("bar", 2, "male") ::
            ("baz", -1, "female") :: Nil)
          .map { tuple =>
            val bson = new BasicBSONObject()
            bson.put("name", "bfoo")
            bson.put("value", 0.1)
            bson.put("t", "female")
            (null, bson)
          }

        val tdf = bsonRDD.map(_._2)
          .map(f => Person(
            f.get("name").toString,
            f.get("value").toString.toDouble,
            f.get("t").toString))
          .toDF()

        tdf.limit(1).show()
      }
    }
Well, when I call 'MySqlDao.insertIntoMySql()', compilation fails with:
    value typedProductIterator is not a member of object scala.runtime.ScalaRunTime
    case class Person(name: String, age: Double, t: String)
I suppose the case class isn't visible inside the closure passed to the map function, so 'toDF()' cannot derive a schema for 'Person' while it is nested in the object. Moving it to the package level fixes the error:
    case class Person(name: String, age: Double, t: String)

    object MySqlDao {
      ...
    }
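For reference, a minimal self-contained sketch of the fixed file might look as follows. It assumes the same Spark 1.x SQLContext and BSON setup as above; the method body is simply the code from 'insertIntoMySql()', only the case class has moved to the top level:

    import org.apache.spark.SparkContext
    import org.bson.BasicBSONObject

    // Top-level case class: now visible to toDF()'s schema derivation.
    case class Person(name: String, age: Double, t: String)

    object MySqlDao {
      private val sc = new SparkContext("local", "Scala Word Count")
      val sqlContext = new org.apache.spark.sql.SQLContext(sc)
      import sqlContext.implicits._

      def insertIntoMySql(): Unit = {
        // Same dummy BSON records as in the question.
        val bsonRDD = sc.parallelize(("foo", 1, "female") ::
            ("bar", 2, "male") ::
            ("baz", -1, "female") :: Nil)
          .map { tuple =>
            val bson = new BasicBSONObject()
            bson.put("name", "bfoo")
            bson.put("value", 0.1)
            bson.put("t", "female")
            (null, bson)
          }

        // Convert each BSONObject into a Person, then to a DataFrame.
        val tdf = bsonRDD.map(_._2)
          .map(f => Person(
            f.get("name").toString,
            f.get("value").toString.toDouble,
            f.get("t").toString))
          .toDF()

        tdf.limit(1).show()
      }
    }

Defining the case class at the top level of the file gives 'toDF()' a statically accessible type to reflect on, which seems to be why this layout is generally recommended for case classes used with Spark SQL.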