public class BlockMergeTask extends Task<MergeWork> implements java.io.Serializable, HadoopJobExecHook
Task.FeedType

| Modifier and Type | Field and Description |
|---|---|
static java.lang.String |
INPUT_SEPERATOR |
protected JobConf |
job |
protected HadoopJobExecHelper |
jobExecHelper |
BACKUP_COMMON_JOIN, backupChildrenTasks, backupTask, childTasks, clonedConf, COMMON_JOIN, conf, console, CONVERTED_LOCAL_MAPJOIN, CONVERTED_MAPJOIN, CONVERTED_SORTMERGEJOIN, db, driverContext, feedSubscribers, id, initialized, isdone, jobID, LOCAL_MAPJOIN, LOG, MAPJOIN_ONLY_NOBACKUP, NO_TAG, parentTasks, queryPlan, queued, started, taskCounters, taskHandle, taskTag, work

| Constructor and Description |
|---|
BlockMergeTask() |
| Modifier and Type | Method and Description |
|---|---|
boolean |
checkFatalErrors(Counters ctrs,
java.lang.StringBuilder errMsg) |
int |
execute(DriverContext driverContext)
This method is overridden in each Task.
|
java.lang.String |
getName()
Gets the name of the node.
|
StageType |
getType()
Should be overridden to return the type of the specific task among the types in StageType.
|
void |
initialize(HiveConf conf,
QueryPlan queryPlan,
DriverContext driverContext) |
void |
logPlanProgress(SessionState ss) |
static void |
main(java.lang.String[] args) |
boolean |
requireLock() |
void |
updateCounters(Counters ctrs,
RunningJob rj) |
addDependentTask, cloneConf, done, executeTask, fetch, getAndInitBackupTask, getBackupChildrenTasks, getBackupTask, getChildren, getChildTasks, getCounters, getDependentTasks, getFeedSubscribers, getId, getInitialized, getJobID, getParentTasks, getQueryPlan, getQueued, getReducer, getResultSchema, getTaskHandle, getTaskTag, getTopOperators, getWork, hasReduce, ifRetryCmdWhenFail, isLocalMode, isMapRedLocalTask, isMapRedTask, isRunnable, pushFeed, receiveFeed, removeDependentTask, removeFromChildrenTasks, setBackupChildrenTasks, setBackupTask, setChildTasks, setDone, setFeedSubscribers, setId, setInitialized, setLocalMode, setParentTasks, setQueryPlan, setQueued, setRetryCmdWhenFail, setStarted, setTaskTag, setWork, shutdown, started, subscribeFeed

protected transient JobConf job
protected HadoopJobExecHelper jobExecHelper
public static java.lang.String INPUT_SEPERATOR
public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext)
Overrides: initialize in class Task<MergeWork>

public boolean requireLock()
Overrides: requireLock in class Task<MergeWork>

public int execute(DriverContext driverContext)
Specified by: execute in class Task<MergeWork>

public java.lang.String getName()
Specified by: getName in interface Node

public static void main(java.lang.String[] args)
public StageType getType()
Specified by: getType in class Task<MergeWork>

public boolean checkFatalErrors(Counters ctrs,
                                java.lang.StringBuilder errMsg)
Specified by: checkFatalErrors in interface HadoopJobExecHook

public void logPlanProgress(SessionState ss) throws java.io.IOException
Specified by: logPlanProgress in interface HadoopJobExecHook
Throws: java.io.IOException

public void updateCounters(Counters ctrs,
                           RunningJob rj)
                    throws java.io.IOException
Specified by: updateCounters in interface HadoopJobExecHook
Throws: java.io.IOException

Copyright © 2012 The Apache Software Foundation