Registering the Spark UDFs
Registering the SparkSQL user-defined functions
Log in to the master node with a user account having permissions to create and drop UDFs.
To navigate to the directory that contains the helper script, run the following command:
cd /opt/cloudera/parcels/PTY_BDP/pepspark/scripts
To create the UDFs using the helper script, run the following command on the spark-shell:
:load /opt/cloudera/parcels/PTY_BDP/pepspark/scripts/create_spark_sql_udfs.scala
Press ENTER.
The script creates all the required user-defined functions for SparkSQL in the current spark-shell session. The output is similar to the following:
Loading /opt/cloudera/parcels/PTY_BDP/pepspark/scripts/create_spark_sql_udfs.scala... res0: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2557/1214243533@e9f28,StringType,List(),Some(class[value[0]: string]),Some(ptyGetVersion),true,true) res1: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2603/321785376@684ad81c,StringType,List(),Some(class[value[0]: string]),Some(ptyGetVersionExtended),true,true) res2: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2604/289080194@594bedf5,StringType,List(),Some(class[value[0]: string]),Some(ptyWhoAmI),true,true) res3: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2605/430442099@6ec6adcc,StringType,List(Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: string]),Some(ptyProtectStr),true,true) res4: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2612/1566019818@55b678dc,StringType,List(Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: string]),Some(ptyUnprotectStr),true,true) res5: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2613/1992744664@2dff4ef9,StringType,List(Some(class[value[0]: string]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: string]),Some(ptyReprotectStr),true,true) res6: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2621/2144907913@4d13970d,StringType,List(Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: string]),Some(ptyProtectUnicode),true,true) res7: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2622/567181258@7c8d4a94,StringType,List(Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: 
string]),Some(ptyUnprotectUnicode),true,true) res8: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2623/1248911890@590eb2c5,StringType,List(Some(class[value[0]: string]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: string]),Some(ptyReprotectUnicode),true,true) res9: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2639/1206966491@4e3617fe,ShortType,List(Some(class[value[0]: smallint]), Some(class[value[0]: string])),Some(class[value[0]: smallint]),Some(ptyProtectShort),false,true) res10: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2643/1430577369@5056f8d7,ShortType,List(Some(class[value[0]: smallint]), Some(class[value[0]: string])),Some(class[value[0]: smallint]),Some(ptyUnprotectShort),false,true) res11: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2644/1959246940@3e7d458a,ShortType,List(Some(class[value[0]: smallint]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: smallint]),Some(ptyReprotectShort),false,true) res12: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2646/468430240@6b874125,IntegerType,List(Some(class[value[0]: int]), Some(class[value[0]: string])),Some(class[value[0]: int]),Some(ptyProtectInt),false,true) res13: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2648/1849024377@377b8c99,IntegerType,List(Some(class[value[0]: int]), Some(class[value[0]: string])),Some(class[value[0]: int]),Some(ptyUnprotectInt),false,true) res14: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2649/1850050643@1ddbf1b0,IntegerType,List(Some(class[value[0]: int]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: int]),Some(ptyReprotectInt),false,true) res15: 
org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2650/1751709974@65f23702,LongType,List(Some(class[value[0]: bigint]), Some(class[value[0]: string])),Some(class[value[0]: bigint]),Some(ptyProtectLong),false,true) res16: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2652/1397163963@5d98ac30,LongType,List(Some(class[value[0]: bigint]), Some(class[value[0]: string])),Some(class[value[0]: bigint]),Some(ptyUnprotectLong),false,true) res17: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2653/231449448@5ce648c7,LongType,List(Some(class[value[0]: bigint]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: bigint]),Some(ptyReprotectLong),false,true) res18: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2654/916221467@203dff48,FloatType,List(Some(class[value[0]: float]), Some(class[value[0]: string])),Some(class[value[0]: float]),Some(ptyProtectFloat),false,true) res19: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2656/1642716671@2403ecd0,FloatType,List(Some(class[value[0]: float]), Some(class[value[0]: string])),Some(class[value[0]: float]),Some(ptyUnprotectFloat),false,true) res20: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2657/449484397@780f6346,FloatType,List(Some(class[value[0]: float]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: float]),Some(ptyReprotectFloat),false,true) res21: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2658/311232024@4718da4b,DoubleType,List(Some(class[value[0]: double]), Some(class[value[0]: string])),Some(class[value[0]: double]),Some(ptyProtectDouble),false,true) res22: org.apache.spark.sql.expressions.UserDefinedFunction = 
SparkUserDefinedFunction($Lambda$2660/1882823613@136e7e2c,DoubleType,List(Some(class[value[0]: double]), Some(class[value[0]: string])),Some(class[value[0]: double]),Some(ptyUnprotectDouble),false,true) res23: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2661/1574577816@2f4f900d,DoubleType,List(Some(class[value[0]: double]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: double]),Some(ptyReprotectDouble),false,true) res24: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2662/701508258@404d6f2,DateType,List(Some(class[value[0]: date]), Some(class[value[0]: string])),Some(class[value[0]: date]),Some(ptyProtectDate),true,true) res25: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2673/1441934479@512f3e71,DateType,List(Some(class[value[0]: date]), Some(class[value[0]: string])),Some(class[value[0]: date]),Some(ptyUnprotectDate),true,true) res26: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2674/19354823@7bacb1b0,DateType,List(Some(class[value[0]: date]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: date]),Some(ptyReprotectDate),true,true) res27: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2675/1203531300@31fe39d3,TimestampType,List(Some(class[value[0]: timestamp]), Some(class[value[0]: string])),Some(class[value[0]: timestamp]),Some(ptyProtectDateTime),true,true) res28: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2676/1395761147@5d81b1ef,TimestampType,List(Some(class[value[0]: timestamp]), Some(class[value[0]: string])),Some(class[value[0]: timestamp]),Some(ptyUnprotectDateTime),true,true) res29: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2677/971152222@1af59a5e,TimestampType,List(Some(class[value[0]: 
timestamp]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: timestamp]),Some(ptyReprotectDateTime),true,true) res30: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2678/449445798@4f994c53,DecimalType(38,18),List(Some(class[value[0]: decimal(38,18)]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: decimal(38,18)]),Some(ptyProtectDecimal),true,true) res31: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2687/375594857@7f5ae905,DecimalType(38,18),List(Some(class[value[0]: decimal(38,18)]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: decimal(38,18)]),Some(ptyUnprotectDecimal),true,true) res32: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2688/2133807474@33f1f5a,DecimalType(38,18),List(Some(class[value[0]: decimal(38,18)]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: decimal(38,18)]),Some(ptyReprotectDecimal),true,true) res33: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2691/1933809761@d57894d,BinaryType,List(Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: binary]),Some(ptyStringEnc),true,true) res34: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2693/255369243@25ed9699,StringType,List(Some(class[value[0]: binary]), Some(class[value[0]: string])),Some(class[value[0]: string]),Some(ptyStringDec),true,true) res35: org.apache.spark.sql.expressions.UserDefinedFunction = SparkUserDefinedFunction($Lambda$2694/542980564@7382cd26,BinaryType,List(Some(class[value[0]: binary]), Some(class[value[0]: string]), Some(class[value[0]: string])),Some(class[value[0]: binary]),Some(ptyStringReEnc),true,true)
Registering the PySpark Scala Wrapper user-defined functions
Log in to the master node with a user account having permissions to create and drop UDFs.
To navigate to the directory that contains the helper script, run the following command:
cd /opt/cloudera/parcels/PTY_BDP/pepspark/scripts
To create the UDFs using the helper script, run the following command in the pyspark shell:
exec(open("/opt/cloudera/parcels/PTY_BDP/pepspark/scripts/create_scala_wrapper_udfs.py").read());
Press ENTER.
The script creates all the required Scala Wrapper user-defined functions in the current pyspark session.
Feedback
Was this page helpful?