public class CarbonHiveInputSplit extends org.apache.hadoop.mapred.FileSplit implements org.apache.carbondata.core.datastore.block.Distributable, Serializable, org.apache.hadoop.io.Writable, org.apache.carbondata.hadoop.internal.index.Block
| Constructor and Description |
|---|
CarbonHiveInputSplit() |
CarbonHiveInputSplit(String segmentId,
org.apache.hadoop.fs.Path path,
long start,
long length,
String[] locations,
org.apache.carbondata.core.metadata.ColumnarFormatVersion version) |
CarbonHiveInputSplit(String segmentId,
org.apache.hadoop.fs.Path path,
long start,
long length,
String[] locations,
int numberOfBlocklets,
org.apache.carbondata.core.metadata.ColumnarFormatVersion version) |
CarbonHiveInputSplit(String segmentId,
org.apache.hadoop.fs.Path path,
long start,
long length,
String[] locations,
int numberOfBlocklets,
org.apache.carbondata.core.metadata.ColumnarFormatVersion version,
Map<String,String> blockStorageIdMap)
Constructor to initialize the CarbonHiveInputSplit with blockStorageIdMap
|
| Modifier and Type | Method and Description |
|---|---|
int |
compareTo(org.apache.carbondata.core.datastore.block.Distributable o) |
static List<org.apache.carbondata.core.datastore.block.TableBlockInfo> |
createBlocks(List<CarbonHiveInputSplit> splitList) |
boolean |
equals(Object obj) |
static CarbonHiveInputSplit |
from(String segmentId,
org.apache.hadoop.mapred.FileSplit split,
org.apache.carbondata.core.metadata.ColumnarFormatVersion version) |
boolean |
fullScan() |
String |
getBlockPath() |
Map<String,String> |
getBlockStorageIdMap()
Returns a map of block location and storage id
|
String |
getBucketId() |
org.apache.carbondata.core.indexstore.BlockletDetailInfo |
getDetailInfo() |
List<String> |
getInvalidSegments() |
List<org.apache.carbondata.core.mutate.UpdateVO> |
getInvalidTimestampRange() |
List<Long> |
getMatchedBlocklets() |
int |
getNumberOfBlocklets()
Returns the number of blocklets
|
String |
getSegmentId() |
static org.apache.carbondata.core.datastore.block.TableBlockInfo |
getTableBlockInfo(CarbonHiveInputSplit inputSplit) |
org.apache.carbondata.core.metadata.ColumnarFormatVersion |
getVersion() |
int |
hashCode() |
void |
readFields(DataInput in) |
void |
setDetailInfo(org.apache.carbondata.core.indexstore.BlockletDetailInfo detailInfo) |
void |
setInvalidSegments(List<String> invalidSegments) |
void |
setInvalidTimestampRange(List<org.apache.carbondata.core.mutate.UpdateVO> invalidTimestamps) |
void |
setVersion(org.apache.carbondata.core.metadata.ColumnarFormatVersion version) |
void |
write(DataOutput out) |
Methods inherited from class org.apache.hadoop.mapred.FileSplit: getLength, getLocationInfo, getLocations, getPath, getStart, toString

public CarbonHiveInputSplit()
public CarbonHiveInputSplit(String segmentId, org.apache.hadoop.fs.Path path, long start, long length, String[] locations, org.apache.carbondata.core.metadata.ColumnarFormatVersion version)
public CarbonHiveInputSplit(String segmentId, org.apache.hadoop.fs.Path path, long start, long length, String[] locations, int numberOfBlocklets, org.apache.carbondata.core.metadata.ColumnarFormatVersion version)
public CarbonHiveInputSplit(String segmentId, org.apache.hadoop.fs.Path path, long start, long length, String[] locations, int numberOfBlocklets, org.apache.carbondata.core.metadata.ColumnarFormatVersion version, Map<String,String> blockStorageIdMap)
Parameters: segmentId, path, start, length, locations, numberOfBlocklets, version, blockStorageIdMap

public static CarbonHiveInputSplit from(String segmentId, org.apache.hadoop.mapred.FileSplit split, org.apache.carbondata.core.metadata.ColumnarFormatVersion version) throws IOException
Throws: IOException

public static List<org.apache.carbondata.core.datastore.block.TableBlockInfo> createBlocks(List<CarbonHiveInputSplit> splitList)
public static org.apache.carbondata.core.datastore.block.TableBlockInfo getTableBlockInfo(CarbonHiveInputSplit inputSplit)
public String getSegmentId()
public void readFields(DataInput in) throws IOException
Specified by: readFields in interface org.apache.hadoop.io.Writable
Overrides: readFields in class org.apache.hadoop.mapred.FileSplit
Throws: IOException

public void write(DataOutput out) throws IOException
Specified by: write in interface org.apache.hadoop.io.Writable
Overrides: write in class org.apache.hadoop.mapred.FileSplit
Throws: IOException

public void setInvalidTimestampRange(List<org.apache.carbondata.core.mutate.UpdateVO> invalidTimestamps)
public List<org.apache.carbondata.core.mutate.UpdateVO> getInvalidTimestampRange()
public int getNumberOfBlocklets()
public org.apache.carbondata.core.metadata.ColumnarFormatVersion getVersion()
public void setVersion(org.apache.carbondata.core.metadata.ColumnarFormatVersion version)
public String getBucketId()
public int compareTo(org.apache.carbondata.core.datastore.block.Distributable o)
Specified by: compareTo in interface Comparable<org.apache.carbondata.core.datastore.block.Distributable>

public String getBlockPath()
Specified by: getBlockPath in interface org.apache.carbondata.hadoop.internal.index.Block

public List<Long> getMatchedBlocklets()
Specified by: getMatchedBlocklets in interface org.apache.carbondata.hadoop.internal.index.Block

public boolean fullScan()
Specified by: fullScan in interface org.apache.carbondata.hadoop.internal.index.Block

public Map<String,String> getBlockStorageIdMap()
public org.apache.carbondata.core.indexstore.BlockletDetailInfo getDetailInfo()
public void setDetailInfo(org.apache.carbondata.core.indexstore.BlockletDetailInfo detailInfo)
Copyright © 2016–2019 The Apache Software Foundation. All rights reserved.