public class MergeWork extends MapWork implements java.io.Serializable

Fields inherited from class MapWork: SAMPLING_ON_PREV_MR, SAMPLING_ON_START

| Constructor and Description |
|---|
MergeWork() |
MergeWork(java.util.List<java.lang.String> inputPaths,
java.lang.String outputDir) |
MergeWork(java.util.List<java.lang.String> inputPaths,
java.lang.String outputDir,
boolean hasDynamicPartitions,
DynamicPartitionCtx dynPartCtx) |
| Modifier and Type | Method and Description |
|---|---|
DynamicPartitionCtx |
getDynPartCtx() |
java.lang.String |
getInputformat() |
java.util.List<java.lang.String> |
getInputPaths() |
ListBucketingCtx |
getListBucketingCtx() |
java.lang.Class<? extends Mapper> |
getMapperClass() |
java.lang.Long |
getMinSplitSize() |
java.lang.String |
getOutputDir() |
boolean |
hasDynamicPartitions() |
boolean |
isGatheringStats() |
boolean |
isListBucketingAlterTableConcatenate() |
void |
resolveConcatenateMerge(HiveConf conf)
alter table ...
|
void |
resolveDynamicPartitionStoredAsSubDirsMerge(HiveConf conf,
Path path,
TableDesc tblDesc,
java.util.ArrayList<java.lang.String> aliases,
PartitionDesc partDesc) |
void |
setDynPartCtx(DynamicPartitionCtx dynPartCtx) |
void |
setHasDynamicPartitions(boolean hasDynamicPartitions) |
void |
setInputPaths(java.util.List<java.lang.String> inputPaths) |
void |
setListBucketingCtx(ListBucketingCtx listBucketingCtx) |
void |
setOutputDir(java.lang.String outputDir) |
Methods inherited from class MapWork: addIndexIntermediateFile, addMapWork, configureJobConf, deriveExplainAttributes, getAliases, getAliasToPartnInfo, getAliasToWork, getAllRootOperators, getBucketedColsByDirectory, getHadoopSupportsSplittable, getIndexIntermediateFile, getJoinTree, getMapLocalWork, getMaxSplitSize, getMinSplitSizePerNode, getMinSplitSizePerRack, getNameToSplitSample, getNumMapTasks, getOpParseCtxMap, getPartitionDescs, getPaths, getPathToAliases, getPathToPartitionInfo, getSamplingType, getSamplingTypeString, getSortedColsByDirectory, getTmpHDFSFileURI, getTruncatedPathToAliases, getWorks, initialize, isInputFormatSorted, isMapperCannotSpanPartns, isUseBucketizedHiveInputFormat, mergeAliasedInput, mergingInto, setAliasToPartnInfo, setAliasToWork, setHadoopSupportsSplittable, setInputformat, setInputFormatSorted, setJoinTree, setMapLocalWork, setMapperCannotSpanPartns, setMaxSplitSize, setMinSplitSize, setMinSplitSizePerNode, setMinSplitSizePerRack, setNameToSplitSample, setNumMapTasks, setOpParseCtxMap, setPathToAliases, setPathToPartitionInfo, setSamplingType, setTmpHDFSFileURI, setUseBucketizedHiveInputFormat

Methods inherited from class BaseWork: getAllOperators, setGatheringStats

Inherited methods: clone

public MergeWork()
public MergeWork(java.util.List<java.lang.String> inputPaths,
java.lang.String outputDir)
public MergeWork(java.util.List<java.lang.String> inputPaths,
java.lang.String outputDir,
boolean hasDynamicPartitions,
DynamicPartitionCtx dynPartCtx)
public java.util.List<java.lang.String> getInputPaths()
public void setInputPaths(java.util.List<java.lang.String> inputPaths)
public java.lang.String getOutputDir()
public void setOutputDir(java.lang.String outputDir)
public java.lang.Class<? extends Mapper> getMapperClass()
public java.lang.Long getMinSplitSize()
Overrides: getMinSplitSize in class MapWork

public java.lang.String getInputformat()
Overrides: getInputformat in class MapWork

public boolean isGatheringStats()
Overrides: isGatheringStats in class BaseWork

public boolean hasDynamicPartitions()
public void setHasDynamicPartitions(boolean hasDynamicPartitions)
public void resolveDynamicPartitionStoredAsSubDirsMerge(HiveConf conf, Path path, TableDesc tblDesc, java.util.ArrayList<java.lang.String> aliases, PartitionDesc partDesc)
Overrides: resolveDynamicPartitionStoredAsSubDirsMerge in class MapWork

public void resolveConcatenateMerge(HiveConf conf)
public DynamicPartitionCtx getDynPartCtx()
public void setDynPartCtx(DynamicPartitionCtx dynPartCtx)
public ListBucketingCtx getListBucketingCtx()
public void setListBucketingCtx(ListBucketingCtx listBucketingCtx)
Parameters: listBucketingCtx - the listBucketingCtx to set

public boolean isListBucketingAlterTableConcatenate()
Copyright © 2012 The Apache Software Foundation