public class SparkScript extends AbstractSyntaxTree
AbstractSyntaxTree.LogCounter
Modifier and Type | Field and Description |
---|---|
static java.lang.String |
CONTEXT_AGGREGATE_FUNCTIONS |
static java.lang.String |
CONTEXT_DICT_2_TUPLE |
static java.lang.String |
CONTEXT_HIVE_CONTEXT |
static java.lang.String |
CONTEXT_JSON_SUPPORT |
static java.lang.String |
CONTEXT_NONE_RDD_SUPPORT |
static java.lang.String |
CONTEXT_SPARK_CONTEXT |
static java.lang.String |
CONTEXT_SQL_CONTEXT |
Modifier and Type | Method and Description |
---|---|
java.lang.String |
customImports()
Return String value for the property spark.customPythonImports
|
java.util.List |
getChildren()
Get the list of child objects owned by this substitution API object.
|
SparkCommand |
getCommand()
Get the very last command in this spark script.
|
java.util.List |
getCommands()
Get list of commands for this spark script.
|
SparkCommandType |
getCommandType()
Get the type of the last command for spark script
|
MapPhysicalDesign |
getCurrentPhysicalDesign()
Get the physical design for the physical node associated with this script.
|
java.lang.String |
getDebugMode()
Get the debug mode for this script.
|
java.util.Set |
getDestinationTargets()
Get the set of destination target names for this script.
|
java.util.List |
getEkmOptionNameList()
Returns the list of EKM option names.
|
java.lang.String |
getEkmOptionValue(java.lang.String key)
Returns an EKM option value.
|
java.lang.String |
getEkmOptionValue(java.lang.String key,
java.lang.String defaultValue)
Returns an EKM option value.
|
java.util.List |
getExpressions()
Get a list of expressions owned by this substitution API object.
|
java.lang.String |
getExprText(MapConnectorPoint scopingInputPoint,
MapExpression expr,
boolean referenceOutputFields)
Return the spark script expression text according to a given map expression.
|
java.lang.String |
getExprTextDF(MapConnectorPoint scopingInputPoint,
MapExpression expr,
boolean referenceOutputFields,
boolean qualified)
Return the spark script expression text according to a given map expression, specific to DataFrames.
|
java.lang.String |
getExprTextDF(MapConnectorPoint scopingInputPoint,
MapExpression expr,
boolean referenceOutputFields,
boolean qualified,
java.lang.String qualifier)
Return the spark script expression text according to a given map expression, specific to DataFrames.
|
MapConnectorPoint |
getInputConnectorPoint(SparkSchema inputSchema)
Get the input connector point according to the spark input schema.
|
SparkField |
getInputField(MapAttribute attr)
Get the input spark field according to a specific map attribute.
|
java.util.List |
getInputSchemas()
Get all the input spark schemas for this script.
|
java.util.List |
getInputScriptMasks()
Get the input script masks.
|
java.util.List |
getInputScripts()
Get a list of scripts as the input of the current one.
|
java.util.List |
getInputSqlQueries()
Get the Input sql queries.
|
java.lang.String |
getMapName()
Get the text for the fileName.
|
java.lang.String |
getMapNameLong()
Get the text for the fileNameLong.
|
Mapping |
getMapping()
Get the corresponding mapping for this script.
|
MapConnectorPoint |
getOutputConnectorPoint()
Get the output connector point for the physical map node which this script reference.
|
SparkSchema |
getOutputSchema()
Get the output spark schema which is calculated from the last command.
|
SqlQuery |
getOutputSqlQuery()
Get the Output SqlQuery.
|
SparkScript |
getParentScript()
Get this script's parent script.
|
MapPhysicalNode |
getPhysicalNode()
Get the physical node that the last command is corresponding to.
|
SparkScript |
getSharedInputScript()
Get shared input script.
|
java.lang.String |
getSparkDataServerProperty(java.lang.String key)
Returns a codegen expression expanding to the respective Spark DataServer property.
|
java.lang.String |
getSparkDataServerProperty(java.lang.String key,
java.lang.String defaultValue)
Returns a codegen expression expanding to the respective Spark DataServer property.
|
ExecutionUnit |
getSparkExecutionUnit()
Get Spark Execution Unit associated with this script.
|
java.lang.String |
getSparkLSchemaName()
Get logical schema name of Spark Execution Unit associated with this script.
|
java.lang.String |
getStreamingBatchDuration()
Return the integer value, as a String, for the property spark.batchDuration.
|
java.util.Map |
getTemplateSubstitutionMap()
Get a hash map containing built-in template substitution variable names as the hash key,
and the substitution variable value as values.
|
java.lang.String |
getText()
Get the text for this script.
|
java.lang.String |
getType()
Get the name of AST Object as the type.
|
boolean |
isCheckpointEnabled()
Return boolean value for the property spark.checkpointing
|
static boolean |
isNodeInStreamingMode(MapPhysicalNode node)
Return true if streaming mode is being used for the associated mapping node.
|
boolean |
isPhysicalDesignInStreamingMode()
Return true if streaming mode is switched on for the mapping.
|
boolean |
isSelector()
Return True if the last command on this script is based on a selector component.
|
java.lang.String |
toString()
Return basic information about SparkScript with the description of its contents
|
boolean |
useDataFrames()
Return boolean value for the property spark.useDataFrames
|
getCodeGenerationTemplate, getCodeGenerationTemplateName, getCustomTemplate, getKMName, getLeafLevelChildren, getMapPhysicalNode, getOrder, getParentAST, getParentOfType, getPropertyValue, getSourceLanguage, getSourceLocation, getSourceTechnology, getSourceText, getTargetLanguage, getTargetLocation, getTargetTechnology, getTargetText, hasCustomTemplate, hasSourceAndTargetText, isLeafLevelNode, isPushFromSource
public static final java.lang.String CONTEXT_SPARK_CONTEXT
public static final java.lang.String CONTEXT_HIVE_CONTEXT
public static final java.lang.String CONTEXT_SQL_CONTEXT
public static final java.lang.String CONTEXT_NONE_RDD_SUPPORT
public static final java.lang.String CONTEXT_DICT_2_TUPLE
public static final java.lang.String CONTEXT_AGGREGATE_FUNCTIONS
public static final java.lang.String CONTEXT_JSON_SUPPORT
public SparkScript getParentScript()
public java.util.List getInputScripts()
public SparkCommandType getCommandType()
public SparkCommand getCommand()
public MapPhysicalNode getPhysicalNode()
public boolean isSelector()
public java.util.List getInputScriptMasks()
public java.lang.String getDebugMode()
public java.lang.String getType()
getType
in class AbstractSyntaxTree
public java.util.List getCommands() throws GenerationException
GenerationException
public java.util.List getChildren()
AbstractSyntaxTree
getChildren
in class AbstractSyntaxTree
public java.util.List getExpressions()
AbstractSyntaxTree
getExpressions
in class AbstractSyntaxTree
public java.lang.String getText() throws GenerationException
getText
in class AbstractSyntaxTree
GenerationException
public java.lang.String getMapNameLong() throws GenerationException
GenerationException
public java.lang.String getMapName() throws GenerationException
GenerationException
public java.util.Map getTemplateSubstitutionMap()
AbstractSyntaxTree
getTemplateSubstitutionMap
in class AbstractSyntaxTree
public SparkSchema getOutputSchema() throws GenerationException
GenerationException
public java.util.List getInputSchemas() throws GenerationException
GenerationException
public MapConnectorPoint getInputConnectorPoint(SparkSchema inputSchema)
inputSchema
- The spark input schema.
public SparkField getInputField(MapAttribute attr) throws GenerationException
attr
- The map attribute which is used to find the corresponding spark input field.
GenerationException
public java.lang.String getExprText(MapConnectorPoint scopingInputPoint, MapExpression expr, boolean referenceOutputFields) throws GenerationException
scopingInputPoint
- If it's not null, specifies a mapping path that includes the specified input connector point. The scoping point is an input point owned by the same component that owns the expression.
expr
- The given map expression.
referenceOutputFields
- If true, indicates that the expression references output fields. If false, indicates that only input fields are referenced.
GenerationException
public java.lang.String getExprTextDF(MapConnectorPoint scopingInputPoint, MapExpression expr, boolean referenceOutputFields, boolean qualified) throws GenerationException
scopingInputPoint
- If it's not null, specifies a mapping path that includes the specified input connector point. The scoping point is an input point owned by the same component that owns the expression.
expr
- The given map expression.
referenceOutputFields
- If true, indicates that the expression references output fields. If false, indicates that only input fields are referenced.
qualified
- If true, indicates that the expression's referenced fields will be prefixed with the dataframe name.
GenerationException
public java.lang.String getExprTextDF(MapConnectorPoint scopingInputPoint, MapExpression expr, boolean referenceOutputFields, boolean qualified, java.lang.String qualifier) throws GenerationException
scopingInputPoint
- If it's not null, specifies a mapping path that includes the specified input connector point. The scoping point is an input point owned by the same component that owns the expression.
expr
- The given map expression.
referenceOutputFields
- If true, indicates that the expression references output fields. If false, indicates that only input fields are referenced.
qualified
- If true, indicates that the expression's referenced fields will be prefixed with the dataframe name.
qualifier
- Qualifier name.
GenerationException
public java.util.List getInputSqlQueries() throws GenerationException
GenerationException
public SqlQuery getOutputSqlQuery() throws GenerationException
GenerationException
public MapConnectorPoint getOutputConnectorPoint()
public java.util.Set getDestinationTargets()
public Mapping getMapping() throws GenerationException
GenerationException
public SparkScript getSharedInputScript()
public java.lang.String toString()
toString
in class java.lang.Object
public boolean isPhysicalDesignInStreamingMode()
public static boolean isNodeInStreamingMode(MapPhysicalNode node) throws PropertyException
node
- the physical node to check.
PropertyException
public java.lang.String getStreamingBatchDuration()
public boolean useDataFrames()
public java.lang.String customImports()
public boolean isCheckpointEnabled()
public MapPhysicalDesign getCurrentPhysicalDesign()
public ExecutionUnit getSparkExecutionUnit() throws GenerationException
GenerationException
public java.lang.String getSparkLSchemaName() throws GenerationException
GenerationException
public java.util.List getEkmOptionNameList() throws MappingException, GenerationException
MappingException
GenerationException
public java.lang.String getEkmOptionValue(java.lang.String key, java.lang.String defaultValue) throws MappingException, GenerationException
MappingException
GenerationException
public java.lang.String getEkmOptionValue(java.lang.String key) throws MappingException, GenerationException
MappingException
GenerationException
public java.lang.String getSparkDataServerProperty(java.lang.String key, java.lang.String defaultValue) throws GenerationException
GenerationException
public java.lang.String getSparkDataServerProperty(java.lang.String key) throws GenerationException
GenerationException