org.apache.hadoop.io.Text.toString() - Java examples

Here are examples of the Java API org.apache.hadoop.io.Text.toString(), taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.

155 Examples
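
Before the sourced examples, here is a minimal stand-alone sketch (not taken from any of the projects below) showing the typical round trip between org.apache.hadoop.io.Text and java.lang.String: Text stores its contents as UTF-8 bytes, and toString() decodes them back into a String.

import org.apache.hadoop.io.Text;

public class TextToStringDemo {
    public static void main(String[] args) {
        // Text holds its payload as UTF-8 bytes.
        Text text = new Text("hello, hadoop");
        // toString() decodes those bytes into a java.lang.String.
        String s = text.toString();
        System.out.println(s);          // prints: hello, hadoop
        System.out.println(s.length()); // prints: 13
    }
}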

19 View Complete Implementation : JobID.java
Copyright Apache License 2.0
Author : aliyun-beta
public String getJtIdentifier() {
    return jtIdentifier.toString();
}

19 View Complete Implementation : TextDoubleTwoPairsWritableComparable.java
Copyright Apache License 2.0
Author : suhyunjeon
/**
 * Get the value of the text1
 */
public String getText1() {
    return text1.toString();
}

19 View Complete Implementation : WorkerId.java
Copyright Apache License 2.0
Author : apache
/**
 * Print workerId.
 * @return workerId in string
 */
public final String toString() {
    return workerId.toString();
}

19 View Complete Implementation : UserCommandKey.java
Copyright Apache License 2.0
Author : apache
public String getUser() {
    return user.toString();
}

19 View Complete Implementation : TextDoublePairWritableComparable.java
Copyright Apache License 2.0
Author : suhyunjeon
/**
 * Get the value of the text
 */
public String getText() {
    return text.toString();
}

19 View Complete Implementation : UserCommandKey.java
Copyright Apache License 2.0
Author : apache
public String getCommand() {
    return command.toString();
}

19 View Complete Implementation : LoadBalancingKMSClientProvider.java
Copyright Apache License 2.0
Author : apache
@Override
public String getCanonicalServiceName() {
    return canonicalService.toString();
}

19 View Complete Implementation : TextDoubleTwoPairsWritableComparable.java
Copyright Apache License 2.0
Author : suhyunjeon
/**
 * Get the value of the text2
 */
public String getText2() {
    return text2.toString();
}

19 View Complete Implementation : JobID.java
Copyright Apache License 2.0
Author : apache
public String getJtIdentifier() {
    return jtIdentifier.toString();
}

19 View Complete Implementation : KMSClientProvider.java
Copyright Apache License 2.0
Author : apache
@Override
public String getCanonicalServiceName() {
    return canonicalService.toString();
}

19 View Complete Implementation : JobID.java
Copyright Apache License 2.0
Author : yncxcw
public String getJtIdentifier() {
    return jtIdentifier.toString();
}

19 View Complete Implementation : UserCommandKey.java
Copyright Apache License 2.0
Author : apache
public String getType() {
    return type.toString();
}

18 View Complete Implementation : IOMapperBase.java
Copyright Apache License 2.0
Author : yncxcw
/**
 * Map file name and offset into statistical data.
 * <p>
 * The map task is to get the
 * <tt>key</tt>, which contains the file name, and the
 * <tt>value</tt>, which is the offset within the file.
 *
 * The parameters are passed to the abstract method
 * {@link #doIO(Reporter,String,long)}, which performs the io operation,
 * usually read or write data, and then
 * {@link #collectStats(OutputCollector,String,long,Object)}
 * is called to prepare stat data for a subsequent reducer.
 */
public void map(Text key, LongWritable value, OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
    String name = key.toString();
    long longValue = value.get();
    reporter.setStatus("starting " + name + " ::host = " + hostName);
    this.stream = getIOStream(name);
    T statValue = null;
    long tStart = System.currentTimeMillis();
    try {
        statValue = doIO(reporter, name, longValue);
    } finally {
        if (stream != null)
            stream.close();
    }
    long tEnd = System.currentTimeMillis();
    long execTime = tEnd - tStart;
    collectStats(output, name, execTime, statValue);
    reporter.setStatus("finished " + name + " ::host = " + hostName);
}

18 View Complete Implementation : AccumulatingReducer.java
Copyright Apache License 2.0
Author : yncxcw
public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
    String field = key.toString();
    reporter.setStatus("starting " + field + " ::host = " + hostName);
    // concatenate strings
    if (field.startsWith(VALUE_TYPE_STRING)) {
        StringBuffer sSum = new StringBuffer();
        while (values.hasNext()) sSum.append(values.next().toString()).append(";");
        output.collect(key, new Text(sSum.toString()));
        reporter.setStatus("finished " + field + " ::host = " + hostName);
        return;
    }
    // sum float values
    if (field.startsWith(VALUE_TYPE_FLOAT)) {
        float fSum = 0;
        while (values.hasNext()) fSum += Float.parseFloat(values.next().toString());
        output.collect(key, new Text(String.valueOf(fSum)));
        reporter.setStatus("finished " + field + " ::host = " + hostName);
        return;
    }
    // sum long values
    if (field.startsWith(VALUE_TYPE_LONG)) {
        long lSum = 0;
        while (values.hasNext()) {
            lSum += Long.parseLong(values.next().toString());
        }
        output.collect(key, new Text(String.valueOf(lSum)));
    }
    reporter.setStatus("finished " + field + " ::host = " + hostName);
}

18 View Complete Implementation : ValueAggregatorReducer.java
Copyright Apache License 2.0
Author : yncxcw
/**
 * @param key
 *        the key is expected to be a Text object, whose prefix indicates
 *        the type of aggregation to aggregate the values. In effect, data
 *        driven computing is achieved. It is assumed that each aggregator's
 *        getReport method emits appropriate output for the aggregator. This
 *        may be further customized.
 * @param values the values to be aggregated
 * @param context
 */
public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
    String keyStr = key.toString();
    int pos = keyStr.indexOf(ValueAggregatorDescriptor.TYPE_SEPARATOR);
    String type = keyStr.substring(0, pos);
    keyStr = keyStr.substring(pos + ValueAggregatorDescriptor.TYPE_SEPARATOR.length());
    long uniqCount = context.getConfiguration().getLong(UniqValueCount.MAX_NUM_UNIQUE_VALUES, Long.MAX_VALUE);
    ValueAggregator aggregator = ValueAggregatorBaseDescriptor.generateValueAggregator(type, uniqCount);
    for (Text value : values) {
        aggregator.addNextValue(value);
    }
    String val = aggregator.getReport();
    key = new Text(keyStr);
    context.write(key, new Text(val));
}

18 View Complete Implementation : TypedBytesWritableOutput.java
Copyright Apache License 2.0
Author : yncxcw
public void writeText(Text t) throws IOException {
    out.writeString(t.toString());
}

18 View Complete Implementation : Hadoop20JHParser.java
Copyright Apache License 2.0
Author : apache
private String getOneLine() throws IOException {
    Text resultText = new Text();
    if (reader.readLine(resultText) == 0) {
        throw new EOFException("apparent bad line");
    }
    return resultText.toString();
}

18 View Complete Implementation : ID3FinalClassifyingMapper.java
Copyright Apache License 2.0
Author : suhyunjeon
@Override
protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {
    String[] columns = value.toString().split(m_delimiter);
    int ruleIndex = -1;
    String classStr = "";
    int preMatchCnt = 0;
    for (int i = 0; i < ruleConditionList.size(); i++) {
        String[] condList = ruleConditionList.get(i);
        int condCnt = condList.length / 2;
        int matchCnt = isMatchCnt(condList, condCnt, columns);
        if (matchCnt == condCnt) {
            classStr = classList.get(i);
            ruleIndex = i;
        }
        if (matchCnt < preMatchCnt)
            break;
        preMatchCnt = matchCnt;
    }
    context.write(NullWritable.get(), new Text(value + m_delimiter + classStr));
}

18 View Complete Implementation : Hadoop20JHParser.java
Copyright Apache License 2.0
Author : yncxcw
private String getOneLine() throws IOException {
    Text resultText = new Text();
    if (reader.readLine(resultText) == 0) {
        throw new EOFException("apparent bad line");
    }
    return resultText.toString();
}

18 View Complete Implementation : TestPBHelper.java
Copyright Apache License 2.0
Author : yncxcw
@Test
public void testConvertText() {
    Text t = new Text("abc".getBytes());
    String s = t.toString();
    Text t1 = new Text(s);
    assertEquals(t, t1);
}

18 View Complete Implementation : AccumulatingReducer.java
Copyright Apache License 2.0
Author : aliyun-beta
public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
    String field = key.toString();
    reporter.setStatus("starting " + field + " ::host = " + hostName);
    // concatenate strings
    if (field.startsWith(VALUE_TYPE_STRING)) {
        StringBuffer sSum = new StringBuffer();
        while (values.hasNext()) sSum.append(values.next().toString()).append(";");
        output.collect(key, new Text(sSum.toString()));
        reporter.setStatus("finished " + field + " ::host = " + hostName);
        return;
    }
    // sum float values
    if (field.startsWith(VALUE_TYPE_FLOAT)) {
        float fSum = 0;
        while (values.hasNext()) fSum += Float.parseFloat(values.next().toString());
        output.collect(key, new Text(String.valueOf(fSum)));
        reporter.setStatus("finished " + field + " ::host = " + hostName);
        return;
    }
    // sum long values
    if (field.startsWith(VALUE_TYPE_LONG)) {
        long lSum = 0;
        while (values.hasNext()) {
            lSum += Long.parseLong(values.next().toString());
        }
        output.collect(key, new Text(String.valueOf(lSum)));
    }
    reporter.setStatus("finished " + field + " ::host = " + hostName);
}

18 View Complete Implementation : RMDelegationTokenSelector.java
Copyright Apache License 2.0
Author : yncxcw
private boolean checkService(Text service, Token<? extends TokenIdentifier> token) {
    if (service == null || token.getService() == null) {
        return false;
    }
    return token.getService().toString().contains(service.toString());
}

18 View Complete Implementation : TestStatsSerde.java
Copyright Apache License 2.0
Author : bunnyg
private void deserializeAndSerializeLazySimple(LazySimpleSerDe serDe, Text t) throws SerDeException {
    // Get the row structure
    StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
    // Deserialize
    Object row = serDe.deserialize(t);
    replacedertEquals("serialized size correct after deserialization", serDe.getSerDeStats().getRawDataSize(), t.getLength());
    // Serialize
    Text serializedText = (Text) serDe.serialize(row, oi);
    replacedertEquals("serialized size correct after serialization", serDe.getSerDeStats().getRawDataSize(), serializedText.toString().length());
}

18 View Complete Implementation : AccumulatingReducer.java
Copyright Apache License 2.0
Author : apache
public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
    String field = key.toString();
    reporter.setStatus("starting " + field + " ::host = " + hostName);
    // concatenate strings
    if (field.startsWith(VALUE_TYPE_STRING)) {
        StringBuffer sSum = new StringBuffer();
        while (values.hasNext()) sSum.append(values.next().toString()).append(";");
        output.collect(key, new Text(sSum.toString()));
        reporter.setStatus("finished " + field + " ::host = " + hostName);
        return;
    }
    // sum float values
    if (field.startsWith(VALUE_TYPE_FLOAT)) {
        float fSum = 0;
        while (values.hasNext()) fSum += Float.parseFloat(values.next().toString());
        output.collect(key, new Text(String.valueOf(fSum)));
        reporter.setStatus("finished " + field + " ::host = " + hostName);
        return;
    }
    // sum long values
    if (field.startsWith(VALUE_TYPE_LONG)) {
        long lSum = 0;
        while (values.hasNext()) {
            lSum += Long.parseLong(values.next().toString());
        }
        output.collect(key, new Text(String.valueOf(lSum)));
    }
    reporter.setStatus("finished " + field + " ::host = " + hostName);
}

18 View Complete Implementation : AMRMTokenSelector.java
Copyright Apache License 2.0
Author : apache
private boolean checkService(Text service, Token<? extends TokenIdentifier> token) {
    if (service == null || token.getService() == null) {
        return false;
    }
    return token.getService().toString().contains(service.toString());
}

18 View Complete Implementation : AMRMTokenSelector.java
Copyright Apache License 2.0
Author : aliyun-beta
private boolean checkService(Text service, Token<? extends TokenIdentifier> token) {
    if (service == null || token.getService() == null) {
        return false;
    }
    return token.getService().toString().contains(service.toString());
}

18 View Complete Implementation : RegexSerDe.java
Copyright Apache License 2.0
Author : bunnyg
@Override
public Object deserialize(Writable blob) throws SerDeException {
    Text rowText = (Text) blob;
    Matcher m = inputPattern.matcher(rowText.toString());
    if (m.groupCount() != numColumns) {
        throw new SerDeException("Number of matching groups doesn't match the number of columns");
    }
    // If do not match, ignore the line, return a row with all nulls.
    if (!m.matches()) {
        unmatchedRowsCount++;
        if (!alreadyLoggedNoMatch) {
            // Report the row if it's the first time
            LOG.warn("" + unmatchedRowsCount + " unmatched rows are found: " + rowText);
            alreadyLoggedNoMatch = true;
        }
        return null;
    }
    // Otherwise, return the row.
    for (int c = 0; c < numColumns; c++) {
        try {
            row.set(c, m.group(c + 1));
        } catch (RuntimeException e) {
            partialMatchedRowsCount++;
            if (!alreadyLoggedPartialMatch) {
                // Report the row if it's the first time
                LOG.warn("" + partialMatchedRowsCount + " partially unmatched rows are found, " + " cannot find group " + c + ": " + rowText);
                alreadyLoggedPartialMatch = true;
            }
            row.set(c, null);
        }
    }
    return row;
}

18 View Complete Implementation : UDFToFloat.java
Copyright Apache License 2.0
Author : bunnyg
/**
 * Convert from string to a float. This is called for CAST(... AS FLOAT)
 *
 * @param i
 *          The string value to convert
 * @return FloatWritable
 */
public FloatWritable evaluate(Text i) {
    if (i == null) {
        return null;
    } else {
        try {
            floatWritable.set(Float.valueOf(i.toString()));
            return floatWritable;
        } catch (NumberFormatException e) {
            // MySQL returns 0 if the string is not a well-formed numeric value.
            // But we decided to return NULL instead, which is more conservative.
            return null;
        }
    }
}

18 View Complete Implementation : FieldSelectionReducer.java
Copyright Apache License 2.0
Author : apache
public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
    String keyStr = key.toString() + this.fieldSeparator;
    for (Text val : values) {
        FieldSelectionHelper helper = new FieldSelectionHelper();
        helper.extractOutputKeyValue(keyStr, val.toString(), fieldSeparator, reduceOutputKeyFieldList, reduceOutputValueFieldList, allReduceValueFieldsFrom, false, false);
        context.write(helper.getKey(), helper.getValue());
    }
}

18 View Complete Implementation : RMDelegationTokenSelector.java
Copyright Apache License 2.0
Author : aliyun-beta
private boolean checkService(Text service, Token<? extends TokenIdentifier> token) {
    if (service == null || token.getService() == null) {
        return false;
    }
    return token.getService().toString().contains(service.toString());
}

18 View Complete Implementation : TLD.java
Copyright Apache License 2.0
Author : NationalSecurityAgency
/**
 * In a rebuild situation build the start key for the next TLD.
 *
 * @param docKey
 *            - start key for the current range
 * @return - a key usable for a new start range.
 */
public static Key getNextParentKey(Key docKey) {
    Text startCF = docKey.getColumnFamily();
    if (startCF.find(NULL) != -1) {
        // we have a start key with a document uid, add to the end of the cf to ensure we go to the next doc
        // parse out the uid
        String cf = startCF.toString();
        int index = cf.indexOf('\0');
        if (index >= 0) {
            String uid = cf.substring(index + 1);
            int index2 = uid.indexOf('\0');
            if (index2 >= 0) {
                uid = uid.substring(0, index2);
            }
            // if we do not have an empty uid
            if (!uid.isEmpty()) {
                uid = TLD.parseRootPointerFromId(uid);
                // to get to the next doc, add the separator for the UID 'extra' (child doc) portion and then the max unicode string
                Text nextDoc = new Text(cf.substring(0, index) + NULL + uid + DEFAULT_SEPARATOR + MAX_UNICODE_STRING);
                docKey = new Key(docKey.getRow(), nextDoc, docKey.getColumnQualifier(), docKey.getColumnVisibility(), docKey.getTimestamp());
            }
        }
    }
    return docKey;
}

18 View Complete Implementation : UDFRe2JRegexpLike.java
Copyright Apache License 2.0
Author : aaronshan
public boolean evaluate(Text text, Text pattern) {
    if (text == null) {
        return false;
    }
    if (re2JRegexp == null) {
        re2JRegexp = new Re2JRegexp(Integer.MAX_VALUE, 5, Slices.utf8Slice(pattern.toString()));
    }
    return re2JRegexp.matches(Slices.utf8Slice(text.toString()));
}

18 View Complete Implementation : ValueAggregatorReducer.java
Copyright Apache License 2.0
Author : apache
/**
 * @param key
 *        the key is expected to be a Text object, whose prefix indicates
 *        the type of aggregation to aggregate the values. In effect, data
 *        driven computing is achieved. It is assumed that each aggregator's
 *        getReport method emits appropriate output for the aggregator. This
 *        may be further customized.
 * @param values the values to be aggregated
 * @param context
 */
public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
    String keyStr = key.toString();
    int pos = keyStr.indexOf(ValueAggregatorDescriptor.TYPE_SEPARATOR);
    String type = keyStr.substring(0, pos);
    keyStr = keyStr.substring(pos + ValueAggregatorDescriptor.TYPE_SEPARATOR.length());
    long uniqCount = context.getConfiguration().getLong(UniqValueCount.MAX_NUM_UNIQUE_VALUES, Long.MAX_VALUE);
    ValueAggregator aggregator = ValueAggregatorBaseDescriptor.generateValueAggregator(type, uniqCount);
    for (Text value : values) {
        aggregator.addNextValue(value);
    }
    String val = aggregator.getReport();
    key = new Text(keyStr);
    context.write(key, new Text(val));
}

18 View Complete Implementation : Hadoop20JHParser.java
Copyright Apache License 2.0
Author : aliyun-beta
private String getOneLine() throws IOException {
    Text resultText = new Text();
    if (reader.readLine(resultText) == 0) {
        throw new EOFException("apparent bad line");
    }
    return resultText.toString();
}

18 View Complete Implementation : YarnClientImpl.java
Copyright Apache License 2.0
Author : yncxcw
@Override
public Token getRMDelegationToken(Text renewer) throws YarnException, IOException {
    /* get the token from RM */
    GetDelegationTokenRequest rmDTRequest = Records.newRecord(GetDelegationTokenRequest.class);
    rmDTRequest.setRenewer(renewer.toString());
    GetDelegationTokenResponse response = rmClient.getDelegationToken(rmDTRequest);
    return response.getRMDelegationToken();
}

18 View Complete Implementation : IOMapperBase.java
Copyright Apache License 2.0
Author : apache
/**
 * Map file name and offset into statistical data.
 * <p>
 * The map task is to get the
 * <tt>key</tt>, which contains the file name, and the
 * <tt>value</tt>, which is the offset within the file.
 *
 * The parameters are passed to the abstract method
 * {@link #doIO(Reporter,String,long)}, which performs the io operation,
 * usually read or write data, and then
 * {@link #collectStats(OutputCollector,String,long,Object)}
 * is called to prepare stat data for a subsequent reducer.
 */
public void map(Text key, LongWritable value, OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
    String name = key.toString();
    long longValue = value.get();
    reporter.setStatus("starting " + name + " ::host = " + hostName);
    this.stream = getIOStream(name);
    T statValue = null;
    long tStart = System.currentTimeMillis();
    try {
        statValue = doIO(reporter, name, longValue);
    } finally {
        if (stream != null)
            stream.close();
    }
    long tEnd = System.currentTimeMillis();
    long execTime = tEnd - tStart;
    collectStats(output, name, execTime, statValue);
    reporter.setStatus("finished " + name + " ::host = " + hostName);
}

18 View Complete Implementation : AMRMTokenSelector.java
Copyright Apache License 2.0
Author : yncxcw
private boolean checkService(Text service, Token<? extends TokenIdentifier> token) {
    if (service == null || token.getService() == null) {
        return false;
    }
    return token.getService().toString().contains(service.toString());
}

18 View Complete Implementation : ValueAggregatorReducer.java
Copyright Apache License 2.0
Author : aliyun-beta
/**
 * @param key
 *        the key is expected to be a Text object, whose prefix indicates
 *        the type of aggregation to aggregate the values. In effect, data
 *        driven computing is achieved. It is assumed that each aggregator's
 *        getReport method emits appropriate output for the aggregator. This
 *        may be further customized.
 * @param values the values to be aggregated
 * @param context
 */
public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
    String keyStr = key.toString();
    int pos = keyStr.indexOf(ValueAggregatorDescriptor.TYPE_SEPARATOR);
    String type = keyStr.substring(0, pos);
    keyStr = keyStr.substring(pos + ValueAggregatorDescriptor.TYPE_SEPARATOR.length());
    long uniqCount = context.getConfiguration().getLong(UniqValueCount.MAX_NUM_UNIQUE_VALUES, Long.MAX_VALUE);
    ValueAggregator aggregator = ValueAggregatorBaseDescriptor.generateValueAggregator(type, uniqCount);
    for (Text value : values) {
        aggregator.addNextValue(value);
    }
    String val = aggregator.getReport();
    key = new Text(keyStr);
    context.write(key, new Text(val));
}

18 View Complete Implementation : FieldSelectionReducer.java
Copyright Apache License 2.0
Author : yncxcw
public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
    String keyStr = key.toString() + this.fieldSeparator;
    for (Text val : values) {
        FieldSelectionHelper helper = new FieldSelectionHelper();
        helper.extractOutputKeyValue(keyStr, val.toString(), fieldSeparator, reduceOutputKeyFieldList, reduceOutputValueFieldList, allReduceValueFieldsFrom, false, false);
        context.write(helper.getKey(), helper.getValue());
    }
}

18 View Complete Implementation : RMDelegationTokenSelector.java
Copyright Apache License 2.0
Author : apache
private boolean checkService(Text service, Token<? extends TokenIdentifier> token) {
    if (service == null || token.getService() == null) {
        return false;
    }
    return token.getService().toString().contains(service.toString());
}

18 View Complete Implementation : RegexSerDe.java
Copyright Apache License 2.0
Author : bunnyg
@Override
public Object deserialize(Writable blob) throws SerDeException {
    if (inputPattern == null) {
        throw new SerDeException("This table does not have serde property \"input.regex\"!");
    }
    Text rowText = (Text) blob;
    Matcher m = inputPattern.matcher(rowText.toString());
    // If do not match, ignore the line, return a row with all nulls.
    if (!m.matches()) {
        unmatchedRows++;
        if (unmatchedRows >= nextUnmatchedRows) {
            nextUnmatchedRows = getNextNumberToDisplay(nextUnmatchedRows);
            // Report the row
            LOG.warn("" + unmatchedRows + " unmatched rows are found: " + rowText);
        }
        return null;
    }
    // Otherwise, return the row.
    for (int c = 0; c < numColumns; c++) {
        try {
            row.set(c, m.group(c + 1));
        } catch (RuntimeException e) {
            partialMatchedRows++;
            if (partialMatchedRows >= nextPartialMatchedRows) {
                nextPartialMatchedRows = getNextNumberToDisplay(nextPartialMatchedRows);
                // Report the row
                LOG.warn("" + partialMatchedRows + " partially unmatched rows are found, " + " cannot find group " + c + ": " + rowText);
            }
            row.set(c, null);
        }
    }
    return row;
}

18 View Complete Implementation : IOMapperBase.java
Copyright Apache License 2.0
Author : aliyun-beta
/**
 * Map file name and offset into statistical data.
 * <p>
 * The map task is to get the
 * <tt>key</tt>, which contains the file name, and the
 * <tt>value</tt>, which is the offset within the file.
 *
 * The parameters are passed to the abstract method
 * {@link #doIO(Reporter,String,long)}, which performs the io operation,
 * usually read or write data, and then
 * {@link #collectStats(OutputCollector,String,long,Object)}
 * is called to prepare stat data for a subsequent reducer.
 */
public void map(Text key, LongWritable value, OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
    String name = key.toString();
    long longValue = value.get();
    reporter.setStatus("starting " + name + " ::host = " + hostName);
    this.stream = getIOStream(name);
    T statValue = null;
    long tStart = System.currentTimeMillis();
    try {
        statValue = doIO(reporter, name, longValue);
    } finally {
        if (stream != null)
            stream.close();
    }
    long tEnd = System.currentTimeMillis();
    long execTime = tEnd - tStart;
    collectStats(output, name, execTime, statValue);
    reporter.setStatus("finished " + name + " ::host = " + hostName);
}

18 View Complete Implementation : FieldSelectionReducer.java
Copyright Apache License 2.0
Author : aliyun-beta
public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
    String keyStr = key.toString() + this.fieldSeparator;
    for (Text val : values) {
        FieldSelectionHelper helper = new FieldSelectionHelper();
        helper.extractOutputKeyValue(keyStr, val.toString(), fieldSeparator, reduceOutputKeyFieldList, reduceOutputValueFieldList, allReduceValueFieldsFrom, false, false);
        context.write(helper.getKey(), helper.getValue());
    }
}

18 View Complete Implementation : UDFToDouble.java
Copyright Apache License 2.0
Author : bunnyg
/**
 * Convert from string to a double. This is called for CAST(... AS DOUBLE)
 *
 * @param i
 *          The string value to convert
 * @return DoubleWritable
 */
public DoubleWritable evaluate(Text i) {
    if (i == null) {
        return null;
    } else {
        try {
            doubleWritable.set(Double.valueOf(i.toString()));
            return doubleWritable;
        } catch (NumberFormatException e) {
            // MySQL returns 0 if the string is not a well-formed double value.
            // But we decided to return NULL instead, which is more conservative.
            return null;
        }
    }
}

18 View Complete Implementation : TypedBytesWritableOutput.java
Copyright Apache License 2.0
Author : apache
public void writeText(Text t) throws IOException {
    out.writeString(t.toString());
}

18 View Complete Implementation : TypedBytesWritableOutput.java
Copyright Apache License 2.0
Author : aliyun-beta
public void writeText(Text t) throws IOException {
    out.writeString(t.toString());
}

18 View Complete Implementation : YarnClientImpl.java
Copyright Apache License 2.0
Author : aliyun-beta
@Override
public Token getRMDelegationToken(Text renewer) throws YarnException, IOException {
    /* get the token from RM */
    GetDelegationTokenRequest rmDTRequest = Records.newRecord(GetDelegationTokenRequest.class);
    rmDTRequest.setRenewer(renewer.toString());
    GetDelegationTokenResponse response = rmClient.getDelegationToken(rmDTRequest);
    return response.getRMDelegationToken();
}

17 View Complete Implementation : TestDtUtilShell.java
Copyright Apache License 2.0
Author : apache
@Test
public void testPrint() throws Exception {
    args = new String[] { "print", tokenFilename };
    rc = dt.run(args);
    replacedertEquals("test simple print exit code", 0, rc);
    replacedertTrue("test simple print output kind:\n" + outContent.toString(), outContent.toString().contains(KIND.toString()));
    replacedertTrue("test simple print output service:\n" + outContent.toString(), outContent.toString().contains(SERVICE.toString()));
    outContent.reset();
    args = new String[] { "print", tokenLegacyFile.toString() };
    rc = dt.run(args);
    replacedertEquals("test legacy print exit code", 0, rc);
    replacedertTrue("test simple print output kind:\n" + outContent.toString(), outContent.toString().contains(KIND.toString()));
    replacedertTrue("test simple print output service:\n" + outContent.toString(), outContent.toString().contains(SERVICE.toString()));
    outContent.reset();
    args = new String[] { "print", "-alias", SERVICE.toString(), tokenFilename };
    rc = dt.run(args);
    replacedertEquals("test alias print exit code", 0, rc);
    replacedertTrue("test simple print output kind:\n" + outContent.toString(), outContent.toString().contains(KIND.toString()));
    replacedertTrue("test simple print output service:\n" + outContent.toString(), outContent.toString().contains(SERVICE.toString()));
    outContent.reset();
    args = new String[] { "print", "-alias", "not-a-serivce", tokenFilename };
    rc = dt.run(args);
    replacedertEquals("test no alias print exit code", 0, rc);
    replacedertFalse("test no alias print output kind:\n" + outContent.toString(), outContent.toString().contains(KIND.toString()));
    replacedertFalse("test no alias print output service:\n" + outContent.toString(), outContent.toString().contains(SERVICE.toString()));
}

17 View Complete Implementation : TestDtUtilShell.java
Copyright Apache License 2.0
Author : apache
@Test
public void testGet() throws Exception {
    args = new String[] { "get", getUrl, tokenFilenameGet };
    rc = dt.run(args);
    replacedertEquals("test mocked get exit code", 0, rc);
    args = new String[] { "print", tokenFilenameGet };
    rc = dt.run(args);
    String oc = outContent.toString();
    replacedertEquals("test print after get exit code", 0, rc);
    replacedertTrue("test print after get output kind:\n" + oc, oc.contains(KIND_GET.toString()));
    replacedertTrue("test print after get output service:\n" + oc, oc.contains(SERVICE_GET.toString()));
}

17 View Complete Implementation : TestClientRMProxy.java
Copyright Apache License 2.0
Author : apache
@Test
public void testGetRMDelegationTokenService() {
    String defaultRMAddress = YarnConfiguration.DEFAULT_RM_ADDRESS;
    YarnConfiguration conf = new YarnConfiguration();
    // HA is not enabled
    Text tokenService = ClientRMProxy.getRMDelegationTokenService(conf);
    String[] services = tokenService.toString().split(",");
    assertEquals(1, services.length);
    for (String service : services) {
        replacedertTrue("Incorrect token service name", service.contains(defaultRMAddress));
    }
    // HA is enabled
    conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
    conf.set(YarnConfiguration.RM_HA_IDS, "rm1,rm2");
    conf.set(HAUtil.addSuffix(YarnConfiguration.RM_HOSTNAME, "rm1"), "0.0.0.0");
    conf.set(HAUtil.addSuffix(YarnConfiguration.RM_HOSTNAME, "rm2"), "0.0.0.0");
    tokenService = ClientRMProxy.getRMDelegationTokenService(conf);
    services = tokenService.toString().split(",");
    assertEquals(2, services.length);
    for (String service : services) {
        replacedertTrue("Incorrect token service name", service.contains(defaultRMAddress));
    }
}