Table of Contents

  1. Preface
  2. Introduction to Big Data Management Administration
  3. Authentication
  4. Running Mappings on a Cluster with Kerberos Authentication
  5. Authorization
  6. Cluster Configuration
  7. Cloud Provisioning Configuration
  8. Data Integration Service Processing
  9. Connections
  10. Multiple Blaze Instances on a Cluster
  11. Monitoring REST API

Big Data Management Administrator Guide

Sample Retrieve Mapping Execution Plans

In this sample use case, a script retrieves the details of the mapping execution plans for a mapping job.
You can use the REST API to retrieve information about the mapping execution plans with the following request URL for a mapping with the Job ID _TNoO9ELEeiimY76kFyfuw:
<RESTOperationsHubService_Host>:<RESTOperationsHubService_Port>/restopshub/services/v1/MappingService/MappingExecutionPlans('_TNoO9ELEeiimY76kFyfuw')
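
For example, the following minimal Python sketch sends this request and lists the scripts in the response. The host name, port, protocol, and credentials are placeholder assumptions, not defaults; substitute the values for your REST Operations Hub Service:

import requests

# Placeholder values; replace with the host, port, and credentials of your
# REST Operations Hub Service (all three are assumptions for this sketch).
HOST = "myhost.example.com"
PORT = 8095
JOB_ID = "_TNoO9ELEeiimY76kFyfuw"

url = (f"http://{HOST}:{PORT}/restopshub/services/v1/MappingService"
       f"/MappingExecutionPlans('{JOB_ID}')")

# Send the request and parse the JSON response shown below.
response = requests.get(url, auth=("Administrator", "password"))  # assumed basic authentication
response.raise_for_status()

plan = response.json()
print("Job ID:", plan["jobId"])
for script in plan["scriptList"]:
    print(script["name"], "depends on:", script["depends"] or "nothing")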

Mapping Execution Plans Output

{ "@odata.context": "$metadata#MappingExecutionPlans/$entity", "jobId": "_TNoO9ELEeiimY76kFyfuw", "scriptList": [ { "name": "InfaSpark0", "content": "package com.informatica.exec\n\nimport com.informatica.bootstrap.functions._\nimport com.informatica.bootstrap.InfaParams._\nimport com.informatica.bootstrap.InfaStreaming.writeToKafka\nimport com.informatica.products.infatransform.spark.boot._\nimport com.informatica.bootstrap._\nimport com.informatica.hive._\nimport com.informatica.bootstrap.{JsonProtocol => JP}\nimport org.apache.spark._\nimport org.apache.spark.rdd._\nimport org.apache.spark.storage.StorageLevel._\nimport org.apache.spark.sql._\nimport org.apache.spark.sql.types._\nimport org.apache.spark.sql.functions._\nimport org.apache.spark.sql.functions.{ broadcast => infabroadcast }\nimport org.apache.spark.sql.infa.expressions._\nimport java.io._\nimport java.sql.Timestamp\nimport scala.reflect.ClassTag\nimport org.apache.spark.sql.catalyst.expressions.Caster\nimport org.apache.spark.sql.catalyst.expressions.JavaCaster\nimport com.informatica.bootstrap.JavaTx._\nimport org.apache.spark.Accumulator\nimport org.apache.spark.util.LongAccumulator\nimport org.apache.spark.scheduler.SparkListener\nimport org.apache.spark.SparkEnv\nimport org.apache.spark.sql.Row\n\nobject InfaSpark0 {\n def main(s:Array[String]) {\n val sc = SparkContextLoader.getSparkContext\n val sqlContext = SparkContextLoader.getSQLContext\n val ls = new LiveStream(sc.getConf)\n ls.relay(JP.sparkConfToJson(sc.getConf)) \n ls.relay(JP.hadoopConfToJson(sc.hadoopConfiguration)) \n val lis = new InfaListener(ls,\"TAG\")\n sc.addSparkListener(lis) \nsqlContext.sparkSession.experimental.extraPreprocessing = new InfaTaggingRules().rules\n val accs = List()\n ls.relay(JP.sparkAppDetailsToJson(sc.getConf, accs)) \n lis.accumulators = accs\n import sqlContext.implicits._\n import org.apache.spark.sql.functions.{stddev_samp, var_samp}\n val icast = caster(\"MM/DD/YYYY HH24:MI:SS\")\n val acast = adapterCaster()\n val jcast = JavaCaster()\n\n try {\n Tuple2(sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"0\"))), sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"1\"))));\n Tuple2(sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"2\"))), sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"3\"))));\n Tuple2(sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"4\"))), sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"5\"))));\n val v0 = infabroadcast(asBlock(sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"6\")))).tag(\"SRC_Read_students_5\").itoDF(\"m\")).itoDF;\n val v1 = updatePartitions(asBlock(sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"7\")))).tag(\"SRC_Read_students_HDFS_src\").itoDF(\"d\"), v0);\n val v2 = v1.join(v0, v0(0).===(v1(0)), \"inner\").itoDF(\"m\");\n val v3 = updatePartitions(asBlock(sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"8\")))).tag(\"SRC_Read_student\").itoDF(\"d\"), v2);\n val v4 = v3.join(v2, v2(1).===(v3(0)), \"inner\").itoDF;\n val v5 = DataTypes.createDecimalType(28, 0);\n val v6 = DataTypes.createDecimalType(18, 0);\n asBlock(sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"9\"))), v4.iselect(icast(icast(v4(2), v5), v6), v4(3), v4(4), icast(icast(v4(5), v5), v6)).itoDF(\"TGT_\").tag(\"TGT_Write_HDFSAppendTarget\").itoDF(\"c\").createOrReplaceTempView(\"tbl0\"));\n } finally {\n 
sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"10\")));\n sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"11\")));\n sqlContext.sql(InfaParams.resolve(InfaParams[String](\"InfaSpark0\", \"12\")));\n }\n sc.stop\n}\n}\n[0] -> [DROP TABLE IF EXISTS `default`.`w7939778750618549156_infa_read_students_hdfs_src_hdfstgtappend_multipartition_sparkmode`]\n[1] -> [CREATE TABLE `default`.`w7939778750618549156_infa_read_students_hdfs_src_hdfstgtappend_multipartition_sparkmode` (`col0` INT, `col1` STRING, `col2` STRING, `col3` INT, `col4` STRING) ROW FORMAT SERDE 'com.informatica.platform.dtm.executor.hive.boot.storagehandler.INFASerDe' STORED AS INPUTFORMAT 'com.informatica.platform.dtm.executor.hive.boot.storagehandler.INFAInputFormat' OUTPUTFORMAT 'com.informatica.platform.dtm.executor.hive.boot.storagehandler.INFAOutputFormat' LOCATION 'hdfs://nameservice1//tmp/SPARK_impUser1/sess7939778750618549156//W7939778750618549156_infa_Read_students_HDFS_src_HDFSTgtAppend_MultiPartition_SparkMode' TBLPROPERTIES ('infa.columns.types'='int,string,string,int,string', 'pwx.mapping.file.path'='./Read_students_HDFS_src_MAPPING_37960411407997671_37960411786739094.bin', 'auto.purge'='true', 'infa.columns'='col0,col1,col2,col3,col4')]\n[2] -> [DROP TABLE IF EXISTS `default`.`w7939778750618549156_infa_read_students_5_hdfstgtappend_multipartition_sparkmode`]\n[3] -> [CREATE TABLE `default`.`w7939778750618549156_infa_read_students_5_hdfstgtappend_multipartition_sparkmode` (`col0` INT, `col1` STRING, `col2` STRING, `col3` INT) ROW FORMAT SERDE 'com.informatica.platform.dtm.executor.hive.boot.storagehandler.INFASerDe' STORED AS INPUTFORMAT 'com.informatica.platform.dtm.executor.hive.boot.storagehandler.INFAInputFormat' OUTPUTFORMAT 'com.informatica.platform.dtm.executor.hive.boot.storagehandler.INFAOutputFormat' LOCATION 'hdfs://nameservice1//tmp/SPARK_impUser1/sess7939778750618549156//W7939778750618549156_INFACOPY_Read_students_5_HDFSTgtAppend_MultiPartition_SparkMode' TBLPROPERTIES ('infa.columns.types'='int,string,string,int', 'pwx.mapping.file.path'='./Read_students_5_MAPPING_37960411392603831_37960411887963169.bin', 'auto.purge'='true', 'infa.columns'='col0,col1,col2,col3')]\n[4] -> [DROP TABLE IF EXISTS `default`.`w7939778750618549156_infa_write_hdfsappendtarget_hdfstgtappend_multipartition_sparkmode`]\n[5] -> [CREATE TABLE `default`.`w7939778750618549156_infa_write_hdfsappendtarget_hdfstgtappend_multipartition_sparkmode` (`col0` DECIMAL(18, 0), `col1` STRING, `col2` STRING, `col3` DECIMAL(18, 0)) ROW FORMAT SERDE 'com.informatica.platform.dtm.executor.hive.boot.storagehandler.INFASerDe' STORED AS INPUTFORMAT 'com.informatica.platform.dtm.executor.hive.boot.storagehandler.INFAInputFormat' OUTPUTFORMAT 'com.informatica.platform.dtm.executor.hive.boot.storagehandler.INFAOutputFormat' LOCATION 'hdfs://nameservice1//tmp/SPARK_impUser1/sess7939778750618549156//W7939778750618549156_infa_Write_HDFSAppendTarget_HDFSTgtAppend_MultiPartition_SparkMode' TBLPROPERTIES ('infa.columns.types'='decimal(18,0),string,string,decimal(18,0)', 'pwx.mapping.file.path'='./Write_HDFSAppendTarget_MAPPING_37960411526174778_37960411903682194.bin', 'pwx.skip.serialization'='true', 'auto.purge'='true', 'infa.columns'='col0,col1,col2,col3')]\n[6] -> [SELECT `w7939778750618549156_infa_read_students_5_hdfstgtappend_multipartition_sparkmode`.`col0` as a0, `w7939778750618549156_infa_read_students_5_hdfstgtappend_multipartition_sparkmode`.`col1` as a1, 
`w7939778750618549156_infa_read_students_5_hdfstgtappend_multipartition_sparkmode`.`col2` as a2, `w7939778750618549156_infa_read_students_5_hdfstgtappend_multipartition_sparkmode`.`col3` as a3 FROM `default`.`w7939778750618549156_infa_read_students_5_hdfstgtappend_multipartition_sparkmode`]\n[7] -> [SELECT `w7939778750618549156_infa_read_students_hdfs_src_hdfstgtappend_multipartition_sparkmode`.`col0` as a0 FROM `default`.`w7939778750618549156_infa_read_students_hdfs_src_hdfstgtappend_multipartition_sparkmode`]\n[8] -> [SELECT alias.id as a0 FROM DEFAULT.student alias]\n[9] -> [INSERT OVERWRITE TABLE `default`.`w7939778750618549156_infa_write_hdfsappendtarget_hdfstgtappend_multipartition_sparkmode` SELECT tbl0.c0 as a0, tbl0.c1 as a1, tbl0.c2 as a2, tbl0.c3 as a3 FROM tbl0]\n[10] -> [DROP TABLE IF EXISTS `default`.`w7939778750618549156_infa_write_hdfsappendtarget_hdfstgtappend_multipartition_sparkmode`]\n[11] -> [DROP TABLE IF EXISTS `default`.`w7939778750618549156_infa_read_students_5_hdfstgtappend_multipartition_sparkmode`]\n[12] -> [DROP TABLE IF EXISTS `default`.`w7939778750618549156_infa_read_students_hdfs_src_hdfstgtappend_multipartition_sparkmode`]", "depends": "Pre_Spark_Task_Command_1" }, { "name": "Pre_Spark_Task_Command_1", "content": "-- ----\n-- Command [Pre_Spark_Task_Command_1_1] \n/data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6/scripts/FileCopyUtil --hadoop.home /data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6 --hdfsUser impUser1 --copyFromLocal --spn adpqa@INFAKRB.INFADEV.COM --keytab /bdmqa/BDM_Automation/Source/adpqa_AD.keytab --ccoConfPath /data/Informatica/10.2.2_252/tomcat/bin/disTemp/inkrh71hdp07_252/DIS_HDP_2.6/cco_hdp_26/SPARK/infacco-site.xml file:///bdmqa/BDM_Automation/Source/students_10.txt hdfs://nameservice1//tmp/SPARK_impUser1/sess7939778750618549156//W7939778750618549156_INFACOPY_Read_students_5_HDFSTgtAppend_MultiPartition_SparkMode/students_10.txt\n-- ----\n-- Command [Pre_Spark_Task_Command_1_2] \n/data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6/scripts/HadoopFsMkdir --hadoop.home /data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6 --hdfsUser impUser1 --spn adpqa@INFAKRB.INFADEV.COM --keytab /bdmqa/BDM_Automation/Source/adpqa_AD.keytab --ccoConfPath /data/Informatica/10.2.2_252/tomcat/bin/disTemp/inkrh71hdp07_252/DIS_HDP_2.6/cco_hdp_26/SPARK/infacco-site.xml hdfs://nameservice1//tmp/SPARK_impUser1/sess7939778750618549156//W7939778750618549156_Write_HDFSAppendTarget_HDFSTgtAppend_MultiPartition_SparkMode/reject-files/\n-- ----\n-- Command [Pre_Spark_Task_Command_1_3] \n/data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6/scripts/HadoopFsRmRf --hadoop.home /data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6 --hdfsUser impUser1 --spn adpqa@INFAKRB.INFADEV.COM --keytab /bdmqa/BDM_Automation/Source/adpqa_AD.keytab --ccoConfPath /data/Informatica/10.2.2_252/tomcat/bin/disTemp/inkrh71hdp07_252/DIS_HDP_2.6/cco_hdp_26/SPARK/infacco-site.xml hdfs://nameservice1/BDM_Automation/Target/HDFSTargetAppend/SparkMode/MultiPartition//StudentHDFSTargetAppend.out-[mr]?[0-9]*\n-- ----\n-- Command [Pre_Spark_Task_Command_1_4] \ntouch /bdmqa/BDM_Automation/Target/StudentHDFSTargetAppend61c555e1-c967-4888-bc7d-ab9ada8ee2a7_empty.txt\n-- ----\n-- Command [Pre_Spark_Task_Command_1_5] \n/data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6/scripts/FileCopyUtil --hadoop.home /data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6 --hdfsUser impUser1 --copyFromLocal --spn adpqa@INFAKRB.INFADEV.COM --keytab 
/bdmqa/BDM_Automation/Source/adpqa_AD.keytab --ccoConfPath /data/Informatica/10.2.2_252/tomcat/bin/disTemp/inkrh71hdp07_252/DIS_HDP_2.6/cco_hdp_26/SPARK/infacco-site.xml file:///bdmqa/BDM_Automation/Target/StudentHDFSTargetAppend61c555e1-c967-4888-bc7d-ab9ada8ee2a7_empty.txt hdfs://nameservice1/BDM_Automation/Target/HDFSTargetAppend/SparkMode/MultiPartition//StudentHDFSTargetAppend.out\n-- ----\n-- Command [Pre_Spark_Task_Command_1_6] \n/data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6/scripts/HadoopFsRmRf --hadoop.home /data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6 --hdfsUser impUser1 --spn adpqa@INFAKRB.INFADEV.COM --keytab /bdmqa/BDM_Automation/Source/adpqa_AD.keytab --ccoConfPath /data/Informatica/10.2.2_252/tomcat/bin/disTemp/inkrh71hdp07_252/DIS_HDP_2.6/cco_hdp_26/SPARK/infacco-site.xml hdfs://nameservice1/BDM_Automation/Target/HDFSTargetAppend/SparkMode/MultiPartition//*_tmp_infa_7939778750618549156_StudentHDFSTargetAppend.out\n-- ----\n-- Command [Pre_Spark_Task_Command_1_7] \n/data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6/scripts/HadoopFsMkdir --hadoop.home /data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6 --hdfsUser impUser1 --spn adpqa@INFAKRB.INFADEV.COM --keytab /bdmqa/BDM_Automation/Source/adpqa_AD.keytab --ccoConfPath /data/Informatica/10.2.2_252/tomcat/bin/disTemp/inkrh71hdp07_252/DIS_HDP_2.6/cco_hdp_26/SPARK/infacco-site.xml hdfs://nameservice1/BDM_Automation/Target/HDFSTargetAppend/SparkMode/MultiPartition//_tmp_infa_7939778750618549156_StudentHDFSTargetAppend.out/\n", "depends": "" }, { "name": "Post_Spark_Task_Command_1", "content": "-- ----\n-- Command [Post_Spark_Task_Command_1_1] \n/data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6/scripts/FileCopyUtil --hadoop.home /data/Informatica/10.2.2_252/services/shared/hadoop/HDP_2.6 --mergefiles --deleteEmptyFiles --hdfsUser impUser1 --copyToLocal --spn adpqa@INFAKRB.INFADEV.COM --keytab /bdmqa/BDM_Automation/Source/adpqa_AD.keytab --ccoConfPath /data/Informatica/10.2.2_252/tomcat/bin/disTemp/inkrh71hdp07_252/DIS_HDP_2.6/cco_hdp_26/SPARK/infacco-site.xml hdfs://nameservice1//tmp/SPARK_impUser1/sess7939778750618549156//W7939778750618549156_Write_HDFSAppendTarget_HDFSTgtAppend_MultiPartition_SparkMode/reject-files file:///data/Informatica/10.2.2_252/tomcat/bin/reject/.bad\n", "depends": "InfaSpark0" } ] }
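
Each entry in scriptList describes one generated script: the Scala driver program InfaSpark0, the pre-task commands that stage files on HDFS, and the post-task commands that collect reject files. The depends field names the script that must run first, so in this sample Pre_Spark_Task_Command_1 precedes InfaSpark0, which precedes Post_Spark_Task_Command_1. Continuing from the request example above (with plan holding the parsed response), the following Python sketch orders the scripts by their depends fields and saves each script's content to a local file for review:

# Continues from the request example: `plan` holds the parsed JSON response.
scripts = {s["name"]: s for s in plan["scriptList"]}

# Order the scripts so that each one follows the script named in its "depends"
# field. Assumes the dependencies form a chain, as in this sample.
ordered, resolved = [], set()
while len(ordered) < len(scripts):
    for name, script in scripts.items():
        if name not in resolved and (not script["depends"] or script["depends"] in resolved):
            ordered.append(script)
            resolved.add(name)

# Save each script's content to a local file for review.
for script in ordered:
    with open(script["name"] + ".txt", "w") as f:
        f.write(script["content"])
    print(script["name"], "<-", script["depends"] or "(no dependency)")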
