Hadoop的Mapper是怎麼從HDFS上讀取TextInputFormat資料的
阿新 • • 發佈:2018-12-31
LineRecordReader.next(LongWritable key, Text value)
LineReader.readLine(Text str, int maxLineLength, int maxBytesToConsume)
DataInputStream.read(byte b[]) /* DFSDataInputStream繼承此方法 */
DFSInputStream.read(long position, byte[] buffer, int offset, int length)
DFSInputStream.fetchBlockByteRange(LocatedBlock block, long start,long end, byte[] buf, int offset)
BlockReader.readAll(byte[] buf, int offset, int len)
FSInputChecker.readFully(InputStream stm, byte[] buf, int offset, int len)
BlockReader.read(byte[] buf, int off, int len)
FSInputChecker.read(byte[] b, int off, int len)
FSInputChecker.read1(byte b[], int off, int len)
FSInputChecker.readChecksumChunk(byte b[], final int off, final int len)
BlockReader.readChunk(long pos, byte[] buf, int offset, int len, byte[] checksumBuf)
IOUtils.readFully(InputStream in, byte buf[], int off, int len)
DataInputStream.read(byte b[], int off, int len)
BufferedInputStream.read(byte b[], int off, int len)
BufferedInputStream.read1(byte[] b, int off, int len)
org.apache.hadoop.net.SocketInputStream.read(byte[] b, int off, int len)
org.apache.hadoop.net.SocketInputStream.read(ByteBuffer dst)
org.apache.hadoop.net.SocketIOWithTimeout.doIO(ByteBuffer buf, int ops)
org.apache.hadoop.net.SocketInputStream.Reader.performIO(ByteBuffer buf)
sun.nio.ch.SocketChannelImpl.read(ByteBuffer buf)
sun.nio.ch.IOUtil.read(FileDescriptor fd, ByteBuffer dst, long position, NativeDispatcher nd, Object lock)
sun.nio.ch.IOUtil.readIntoNativeBuffer(FileDescriptor fd, ByteBuffer bb, long position, NativeDispatcher nd, Object lock)
sun.nio.ch.SocketDispatcher.read(FileDescriptor fd, long address, int len)
sun.nio.ch.SocketDispatcher.read0(FileDescriptor fd, long address, int len) /* Native Method,根據不同的JDK實現不同 */