org.apache.avro.Schema.getNamespace() - Java examples

Here are examples of the Java API org.apache.avro.Schema.getNamespace(), taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.

69 Examples
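
Before diving into the examples, here is a minimal, self-contained sketch (the record name and fields are illustrative) of what getNamespace() returns for a record schema, alongside the related getName() and getFullName() accessors:

import org.apache.avro.Schema;

public class GetNamespaceDemo {

    public static void main(String[] args) {
        Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"User\",\"namespace\":\"com.example\","
                + "\"fields\":[{\"name\":\"id\",\"type\":\"long\"}]}");
        System.out.println(schema.getNamespace()); // com.example
        System.out.println(schema.getName());      // User
        System.out.println(schema.getFullName());  // com.example.User
    }
}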

16 votes : NetSuiteDatasetRuntimeImpl.java
Copyright Apache License 2.0
Author : Talend
/**
 * Extend a schema with additional fields.
 *
 * @param sourceSchema source schema
 * @param newSchemaName name of new schema
 * @param fieldsToAdd fields to be added
 * @return new schema
 */
public static Schema extendSchema(Schema sourceSchema, String newSchemaName, List<Schema.Field> fieldsToAdd) {
    Schema newSchema = Schema.createRecord(newSchemaName, sourceSchema.getDoc(), sourceSchema.getNamespace(), sourceSchema.isError());
    List<Schema.Field> copyFieldList = new ArrayList<>();
    for (Schema.Field se : sourceSchema.getFields()) {
        Schema.Field field = copyField(se);
        copyFieldList.add(field);
    }
    copyFieldList.addAll(fieldsToAdd);
    newSchema.setFields(copyFieldList);
    for (Map.Entry<String, Object> entry : sourceSchema.getObjectProps().entrySet()) {
        newSchema.addProp(entry.getKey(), entry.getValue());
    }
    return newSchema;
}
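
The helper above relies on project-internal code (copyField), but the underlying pattern is plain Avro: re-create every field, because a Schema.Field instance cannot be attached to two schemas, and carry the source namespace over via getNamespace(). A minimal sketch of that pattern, assuming Avro 1.9+ (where Field#defaultVal() returns Object); the record and field names are illustrative:

import java.util.ArrayList;
import java.util.List;

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

public class ExtendSchemaSketch {

    public static void main(String[] args) {
        Schema source = SchemaBuilder.record("Account").namespace("com.example.ns")
                .fields().requiredString("id").endRecord();
        // Re-create each field; reusing a Field already attached to a schema throws.
        List<Schema.Field> fields = new ArrayList<>();
        for (Schema.Field f : source.getFields()) {
            fields.add(new Schema.Field(f.name(), f.schema(), f.doc(), f.defaultVal()));
        }
        fields.add(new Schema.Field("comment", Schema.create(Schema.Type.STRING), "added field", (Object) null));
        Schema extended = Schema.createRecord("AccountExtended", source.getDoc(), source.getNamespace(), source.isError());
        extended.setFields(fields);
        System.out.println(extended.getFullName()); // com.example.ns.AccountExtended
    }
}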

16 votes : CodeGenerator.java
Copyright BSD 2-Clause "Simplified" License
Author : linkedin
private void validateParsedSchemas(Map<String, SchemaDetails> parsedSchemas, Map<File, File> fileToParent) {
    for (Map.Entry<String, SchemaDetails> entry : parsedSchemas.entrySet()) {
        String fqcn = entry.getKey();
        SchemaDetails schemaDetails = entry.getValue();
        if (!schemaDetails.isTopLevel()) {
            continue;
        }
        Schema schema = schemaDetails.getSchema();
        File file = schemaDetails.getLocation();
        File root = fileToParent.get(file);
        if (validateSchemaNamespaceVsFilePath) {
            String namespace = schema.getNamespace();
            String relativePath;
            if (root == file) {
                relativePath = "";
            } else {
                relativePath = root.toPath().relativize(file.toPath().getParent()).toString().replaceAll(Pattern.quote(File.separator), ".");
            }
            if (namespace == null) {
                if (!relativePath.equals("")) {
                    throw new IllegalArgumentException("schema " + fqcn + " has no namespace yet is defined in " + file + " whose relative path to root is " + relativePath);
                }
            } else {
                if (!relativePath.equals(namespace)) {
                    throw new IllegalArgumentException("schema " + fqcn + " belongs to namespace " + namespace + " yet is defined in " + file + " whose relative path to root is " + relativePath);
                }
            }
        }
        if (validateSchemaNameVsFileName) {
            String name = schema.getName();
            String fileName = FilenameUtils.removeExtension(file.getName());
            if (!fileName.equals(name)) {
                throw new IllegalArgumentException("schema " + fqcn + " has name " + name + " yet is defined in a file called " + file.getName());
            }
        }
    }
}

15 votes : TestCSVSchemaInference.java
Copyright Apache License 2.0
Author : kite-sdk
@Test
public void testSchemaNamespace() throws Exception {
    InputStream stream = new ByteArrayInputStream(csvLines.getBytes("utf8"));
    Schema schema = CSVUtil.inferNullableSchema("com.example.TestRecord", stream, new CSVProperties.Builder().hasHeader().build());
    Assert.assertEquals("Should use name", "TestRecord", schema.getName());
    Assert.assertEquals("Should set namespace", "com.example", schema.getNamespace());
}
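
The test passes the dotted name "com.example.TestRecord" to the inference utility; the splitting of a dotted name into namespace and simple name is built into Avro itself, as this small sketch shows:

import java.util.Collections;

import org.apache.avro.Schema;

public class DottedNameDemo {

    public static void main(String[] args) {
        // Avro splits a dotted record name into namespace and simple name:
        Schema s = Schema.createRecord("com.example.TestRecord", null, null, false);
        s.setFields(Collections.emptyList());
        System.out.println(s.getNamespace()); // com.example
        System.out.println(s.getName());      // TestRecord
    }
}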

14 votes : AvroUtils.java
Copyright Apache License 2.0
Author : apache
/**
 * Copies the input {@link org.apache.avro.Schema} but changes the schema name.
 * @param schema {@link org.apache.avro.Schema} to copy.
 * @param newName name for the copied {@link org.apache.avro.Schema}.
 * @return A {@link org.apache.avro.Schema} that is a copy of schema, but has the name newName.
 */
public static Schema switchName(Schema schema, String newName) {
    if (schema.getName().equals(newName)) {
        return schema;
    }
    Schema newSchema = Schema.createRecord(newName, schema.getDoc(), schema.getNamespace(), schema.isError());
    List<Field> fields = schema.getFields();
    Iterable<Field> fieldsNew = Iterables.transform(fields, new Function<Field, Field>() {

        @Override
        public Schema.Field apply(Field input) {
            // this should never happen but the API has marked input as Nullable
            if (null == input) {
                return null;
            }
            Field field = new Field(input.name(), input.schema(), input.doc(), input.defaultValue(), input.order());
            return field;
        }
    });
    newSchema.setFields(Lists.newArrayList(fieldsNew));
    return newSchema;
}

13 votes : GenerateAvroSchemaTask.java
Copyright Apache License 2.0
Author : davidmc24
private void processProtoFile(File sourceFile) {
    getLogger().info("Processing {}", sourceFile);
    try {
        Protocol protocol = Protocol.parse(sourceFile);
        for (Schema schema : protocol.getTypes()) {
            String path = schema.getNamespace().replaceAll(Pattern.quote("."), "/");
            File schemaFile = new File(getOutputDir().get().getAsFile(), path + "/" + schema.getName() + "." + SCHEMA_EXTENSION);
            String schemaJson = schema.toString(true);
            FileUtils.writeJsonFile(schemaFile, schemaJson);
            getLogger().debug("Wrote {}", schemaFile.getPath());
        }
    } catch (IOException ex) {
        throw new GradleException(String.format("Failed to process protocol definition file %s", sourceFile), ex);
    }
}
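
The namespace-to-directory conversion in the loop is worth isolating. Pattern.quote is needed because replaceAll treats its first argument as a regular expression, and a bare "." would match any character. A standalone sketch (the namespace value is illustrative):

import java.util.regex.Pattern;

public class NamespaceToPathDemo {

    public static void main(String[] args) {
        String namespace = "com.example.events"; // e.g. a schema.getNamespace() result
        String path = namespace.replaceAll(Pattern.quote("."), "/");
        System.out.println(path); // com/example/events
    }
}

Note that getNamespace() returns null for a schema declared without a namespace, so a null check before this conversion would make the task above more defensive.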

12 votes : AvroUtils.java
Copyright Apache License 2.0
Author : apache
private static Optional<Schema> removeUncomparableFieldsFromRecord(Schema record, Map<Schema, Optional<Schema>> processed) {
    Preconditions.checkArgument(record.getType() == Schema.Type.RECORD);
    Optional<Schema> result = processed.get(record);
    if (null != result) {
        return result;
    }
    List<Field> fields = Lists.newArrayList();
    for (Field field : record.getFields()) {
        Optional<Schema> newFieldSchema = removeUncomparableFields(field.schema(), processed);
        if (newFieldSchema.isPresent()) {
            fields.add(new Field(field.name(), newFieldSchema.get(), field.doc(), field.defaultValue()));
        }
    }
    Schema newSchema = Schema.createRecord(record.getName(), record.getDoc(), record.getNamespace(), false);
    newSchema.setFields(fields);
    result = Optional.of(newSchema);
    processed.put(record, result);
    return result;
}

12 votes : SchemaGeneratorUtils.java
Copyright Apache License 2.0
Author : Talend
/**
 * Generate the Avro schema from a tree representation of the schema.
 *
 * @param tree Hashmap representing a tree generated by the method generateTree()
 * @param elementToGenerate the current part of the tree that will be generated
 * @param inputSchema the original input schema, used to retrieve the name, doc and namespace of existing elements
 * @return the generated Avro schema
 */
public static Schema convertTreeToAvroSchema(Map<String, Set<Object>> tree, String elementToGenerate, Schema inputSchema) {
    List<Schema.Field> fieldList = new ArrayList<>();
    if (tree.containsKey(elementToGenerate)) {
        for (Object treeElement : tree.get(elementToGenerate)) {
            if (treeElement instanceof String) {
                // path element, generate the schema of the subtree then add it as a field.
                Schema subElementSchema = convertTreeToAvroSchema(tree, (String) treeElement, inputSchema);
                String elementName = (String) treeElement;
                if (elementName.contains(".")) {
                    elementName = StringUtils.substringAfterLast(elementName, ".");
                }
                fieldList.add(new Field(elementName, subElementSchema, "", ""));
            } else if (treeElement instanceof Field) {
                // field element, adding it to the field list.
                fieldList.add((Field) treeElement);
            } else {
                TalendRuntimeException.build(CommonErrorCodes.UNEXPECTED_ARGUMENT).setAndThrow("Should be only String or Field", treeElement.getClass().toString());
            }
        }
    } else {
        if (!TREE_ROOT_DEFAULT_VALUE.equals(elementToGenerate)) {
            TalendRuntimeException.build(CommonErrorCodes.UNEXPECTED_ARGUMENT).setAndThrow(tree.keySet().toString(), elementToGenerate);
        }
    }
    try {
        if (inputSchema == null) {
            return Schema.createRecord(fieldList);
        } else if ("$".equals(elementToGenerate)) {
            return Schema.createRecord(inputSchema.getName(), inputSchema.getDoc(), inputSchema.getNamespace(), inputSchema.isError(), fieldList);
        } else if (retrieveFieldFromJsonPath(inputSchema, elementToGenerate) != null) {
            // If the field exists in the inputSchema, copy its schema
            Schema currentSchema = retrieveFieldFromJsonPath(inputSchema, elementToGenerate).schema();
            return Schema.createRecord(currentSchema.getName(), currentSchema.getDoc(), currentSchema.getNamespace(), currentSchema.isError(), fieldList);
        } else {
            return Schema.createRecord(fieldList);
        }
    } catch (AvroRuntimeException e) {
        // this will be thrown when trying to get the name of an anonymous type
        return Schema.createRecord(fieldList);
    }
}

12 votes : TestCSVSchemaInference.java
Copyright Apache License 2.0
Author : kite-sdk
@Test
public void testSchemaInference() throws Exception {
    InputStream stream = new ByteArrayInputStream(csvLines.getBytes("utf8"));
    Schema schema = CSVUtil.inferSchema("TestRecord", stream, new CSVProperties.Builder().hasHeader().build());
    Assert.assertEquals("Should use name", "TestRecord", schema.getName());
    Assert.assertNull("Should not have namespace", schema.getNamespace());
    Assert.assertNotNull(schema.getField("long"));
    Assert.assertNotNull(schema.getField("float"));
    Assert.assertNotNull(schema.getField("double"));
    Assert.assertNotNull(schema.getField("double2"));
    Assert.assertNotNull(schema.getField("string"));
    Assert.assertNotNull(schema.getField("nullable_long"));
    Assert.assertNotNull(schema.getField("nullable_string"));
    Assert.assertEquals("Should infer a long", schema(Schema.Type.LONG), schema.getField("long").schema());
    Assert.assertEquals("Should infer a float (ends in f)", schema(Schema.Type.FLOAT), schema.getField("float").schema());
    Assert.assertEquals("Should infer a double (ends in d)", nullable(Schema.Type.DOUBLE), schema.getField("double").schema());
    Assert.assertEquals("Should infer a double (decimal defaults to double)", nullable(Schema.Type.DOUBLE), schema.getField("double2").schema());
    Assert.assertEquals("Should infer a non-null string (not numeric)", schema(Schema.Type.STRING), schema.getField("string").schema());
    Assert.assertEquals("Should infer a nullable long (second line is a long)", nullable(Schema.Type.LONG), schema.getField("nullable_long").schema());
    Assert.assertEquals("Should infer a nullable string (second is missing)", nullable(Schema.Type.STRING), schema.getField("nullable_string").schema());
}
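
The assertNull above highlights a contract worth remembering: getNamespace() returns null when no namespace was declared, and getFullName() then falls back to the simple name. A minimal sketch:

import java.util.Collections;

import org.apache.avro.Schema;

public class NoNamespaceDemo {

    public static void main(String[] args) {
        Schema s = Schema.createRecord("TestRecord", null, null, false);
        s.setFields(Collections.emptyList());
        System.out.println(s.getNamespace()); // null
        System.out.println(s.getFullName());  // TestRecord
    }
}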

12 votes : TSalesforceOutputProperties.java
Copyright Apache License 2.0
Author : Talend
private Schema newSchema(Schema metadataSchema, String newSchemaName, List<Schema.Field> moreFields) {
    Schema newSchema = Schema.createRecord(newSchemaName, metadataSchema.getDoc(), metadataSchema.getNamespace(), metadataSchema.isError());
    newSchema.setFields(moreFields);
    for (Map.Entry<String, Object> entry : metadataSchema.getObjectProps().entrySet()) {
        newSchema.addProp(entry.getKey(), entry.getValue());
    }
    return newSchema;
}

12 votes : PruneColumns.java
Copyright Apache License 2.0
Author : apache
private static Schema copyRecord(Schema record, List<Schema.Field> newFields) {
    Schema copy = Schema.createRecord(record.getName(), record.getDoc(), record.getNamespace(), record.isError(), newFields);
    for (Map.Entry<String, Object> prop : record.getObjectProps().entrySet()) {
        copy.addProp(prop.getKey(), prop.getValue());
    }
    return copy;
}

11 votes : TestCopyCommandClusterNewField.java
Copyright Apache License 2.0
Author : kite-sdk
@Override
public Schema getEvolvedSchema(Schema original) {
    List<Schema.Field> fields = Lists.newArrayList();
    fields.add(new Schema.Field("new", Schema.createUnion(ImmutableList.of(Schema.create(Schema.Type.NULL), Schema.create(Schema.Type.STRING))), "New field", NullNode.getInstance()));
    for (Schema.Field field : original.getFields()) {
        fields.add(new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultValue()));
    }
    Schema evolved = Schema.createRecord(original.getName(), original.getDoc(), original.getNamespace(), false);
    evolved.setFields(fields);
    return evolved;
}

11 votes : TSnowflakeOutputProperties.java
Copyright Apache License 2.0
Author : Talend
private void updateOutputSchemas() {
    Schema inputSchema = table.main.schema.getValue();
    final List<Schema.Field> additionalRejectFields = new ArrayList<Schema.Field>();
    addSchemaField(FIELD_COLUMN_NAME, additionalRejectFields);
    addSchemaField(FIELD_ROW_NUMBER, additionalRejectFields);
    addSchemaField(FIELD_CATEGORY, additionalRejectFields);
    addSchemaField(FIELD_CHARACTER, additionalRejectFields);
    addSchemaField(FIELD_ERROR_MESSAGE, additionalRejectFields);
    addSchemaField(FIELD_BYTE_OFFSET, additionalRejectFields);
    addSchemaField(FIELD_LINE, additionalRejectFields);
    addSchemaField(FIELD_SQL_STATE, additionalRejectFields);
    addSchemaField(FIELD_CODE, additionalRejectFields);
    Schema rejectSchema = Schema.createRecord("rejectOutput", inputSchema.getDoc(), inputSchema.getNamespace(), inputSchema.isError());
    List<Schema.Field> copyFieldList = new ArrayList<>();
    copyFieldList.addAll(additionalRejectFields);
    rejectSchema.setFields(copyFieldList);
    schemaReject.schema.setValue(rejectSchema);
}

11 votes : TestCSVSchemaInference.java
Copyright Apache License 2.0
Author : kite-sdk
@Test
public void testNullableSchemaInference() throws Exception {
    InputStream stream = new ByteArrayInputStream(csvLines.getBytes("utf8"));
    Schema schema = CSVUtil.inferNullableSchema("TestRecord", stream, new CSVProperties.Builder().hasHeader().build(), ImmutableSet.of("float"));
    Assert.assertEquals("Should use name", "TestRecord", schema.getName());
    Assert.assertNull("Should not have namespace", schema.getNamespace());
    Assert.assertNotNull(schema.getField("long"));
    Assert.assertNotNull(schema.getField("float"));
    Assert.assertNotNull(schema.getField("double"));
    Assert.assertNotNull(schema.getField("double2"));
    Assert.assertNotNull(schema.getField("string"));
    Assert.assertNotNull(schema.getField("nullable_long"));
    Assert.assertNotNull(schema.getField("nullable_string"));
    Assert.assertEquals("Should infer a long", nullable(Schema.Type.LONG), schema.getField("long").schema());
    Assert.assertEquals("Should infer a non-null float (required, ends in f)", schema(Schema.Type.FLOAT), schema.getField("float").schema());
    Assert.assertEquals("Should infer a double (ends in d)", nullable(Schema.Type.DOUBLE), schema.getField("double").schema());
    Assert.assertEquals("Should infer a double (decimal defaults to double)", nullable(Schema.Type.DOUBLE), schema.getField("double2").schema());
    Assert.assertEquals("Should infer a string (not numeric)", nullable(Schema.Type.STRING), schema.getField("string").schema());
    Assert.assertEquals("Should infer a long (second line is a long)", nullable(Schema.Type.LONG), schema.getField("nullable_long").schema());
    Assert.assertEquals("Should infer a nullable string (second is missing)", nullable(Schema.Type.STRING), schema.getField("nullable_string").schema());
}

11 votes : MultiConverterTest.java
Copyright Apache License 2.0
Author : apache
private void checkConvertedAvroData(Schema schema, GenericRecord record) {
    Assert.assertEquals(schema.getNamespace(), "example.avro");
    Assert.assertEquals(schema.getType(), Schema.Type.RECORD);
    Assert.assertEquals(schema.getName(), "User");
    Assert.assertEquals(schema.getFields().size(), 3);
    Schema.Field nameField = schema.getField("name");
    Assert.assertEquals(nameField.name(), "name");
    Assert.assertEquals(nameField.schema().getType(), Schema.Type.STRING);
    Schema.Field favNumberField = schema.getField("favorite_number");
    Assert.assertEquals(favNumberField.name(), "favorite_number");
    Assert.assertEquals(favNumberField.schema().getType(), Schema.Type.INT);
    Schema.Field favColorField = schema.getField("favorite_color");
    Assert.assertEquals(favColorField.name(), "favorite_color");
    Assert.assertEquals(favColorField.schema().getType(), Schema.Type.STRING);
    Assert.assertEquals(record.get("name"), "Alyssa");
    Assert.assertEquals(record.get("favorite_number"), 256d);
    Assert.assertEquals(record.get("favorite_color"), "yellow");
}

11 votes : AvroUtils.java
Copyright Apache License 2.0
Author : apache
/**
 * Inner recursive method called by {@link #dropRecursiveFields(Schema)}
 * @param schemaEntry the schema under inspection, paired with the field path leading to it
 * @param parents the chain of record schemas traversed so far
 * @param fieldsWithRecursion accumulator collecting the fields found to be recursive
 * @return the transformed Schema, null if schema is recursive w.r.t parent schema traversed so far
 */
private static Schema dropRecursive(SchemaEntry schemaEntry, List<SchemaEntry> parents, List<SchemaEntry> fieldsWithRecursion) {
    Schema schema = schemaEntry.schema;
    // ignore primitive fields
    switch(schema.getType()) {
        case UNION:
            {
                List<Schema> unionTypes = schema.getTypes();
                List<Schema> copiedUnionTypes = new ArrayList<Schema>();
                for (Schema unionSchema : unionTypes) {
                    SchemaEntry unionSchemaEntry = new SchemaEntry(schemaEntry.fieldName, unionSchema);
                    copiedUnionTypes.add(dropRecursive(unionSchemaEntry, parents, fieldsWithRecursion));
                }
                if (copiedUnionTypes.stream().anyMatch(x -> x == null)) {
                    // one or more types in the union are referring to a parent type (directly recursive),
                    // entire union must be dropped
                    return null;
                } else {
                    Schema copySchema = Schema.createUnion(copiedUnionTypes);
                    copyProperties(schema, copySchema);
                    return copySchema;
                }
            }
        case RECORD:
            {
                // check if the type of this schema matches any in the parents list
                if (parents.stream().anyMatch(parent -> parent.fullyQualifiedType().equals(schemaEntry.fullyQualifiedType()))) {
                    fieldsWithRecursion.add(schemaEntry);
                    return null;
                }
                List<SchemaEntry> newParents = new ArrayList<>(parents);
                newParents.add(schemaEntry);
                List<Schema.Field> copiedSchemaFields = new ArrayList<>();
                for (Schema.Field field : schema.getFields()) {
                    String fieldName = schemaEntry.fieldName != null ? schemaEntry.fieldName + "." + field.name() : field.name();
                    SchemaEntry fieldSchemaEntry = new SchemaEntry(fieldName, field.schema());
                    Schema copiedFieldSchema = dropRecursive(fieldSchemaEntry, newParents, fieldsWithRecursion);
                    if (copiedFieldSchema != null) {
                        Schema.Field copiedField = new Schema.Field(field.name(), copiedFieldSchema, field.doc(), field.defaultValue(), field.order());
                        copyFieldProperties(field, copiedField);
                        copiedSchemaFields.add(copiedField);
                    }
                }
                if (copiedSchemaFields.size() > 0) {
                    Schema copiedRecord = Schema.createRecord(schema.getName(), schema.getDoc(), schema.getNamespace(), schema.isError());
                    copiedRecord.setFields(copiedSchemaFields);
                    copyProperties(schema, copiedRecord);
                    return copiedRecord;
                } else {
                    return null;
                }
            }
        case ARRAY:
            {
                Schema itemSchema = schema.getElementType();
                SchemaEntry itemSchemaEntry = new SchemaEntry(schemaEntry.fieldName, itemSchema);
                Schema copiedItemSchema = dropRecursive(itemSchemaEntry, parents, fieldsWithRecursion);
                if (copiedItemSchema == null) {
                    return null;
                } else {
                    Schema copiedArraySchema = Schema.createArray(copiedItemSchema);
                    copyProperties(schema, copiedArraySchema);
                    return copiedArraySchema;
                }
            }
        case MAP:
            {
                Schema valueSchema = schema.getValueType();
                SchemaEntry valueSchemaEntry = new SchemaEntry(schemaEntry.fieldName, valueSchema);
                Schema copiedValueSchema = dropRecursive(valueSchemaEntry, parents, fieldsWithRecursion);
                if (copiedValueSchema == null) {
                    return null;
                } else {
                    Schema copiedMapSchema = Schema.createMap(copiedValueSchema);
                    copyProperties(schema, copiedMapSchema);
                    return copiedMapSchema;
                }
            }
        default:
            {
                return schema;
            }
    }
}
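
The recursion check in the RECORD branch compares fully qualified type names, i.e. namespace plus name. A sketch of why that is the right key: in a self-referential record, the nested reference resolves to a schema with the same full name (the schema JSON here is illustrative):

import org.apache.avro.Schema;

public class RecursiveSchemaDemo {

    public static void main(String[] args) {
        Schema node = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"Node\",\"namespace\":\"com.example\","
                + "\"fields\":[{\"name\":\"next\",\"type\":[\"null\",\"Node\"],\"default\":null}]}");
        Schema nextType = node.getField("next").schema().getTypes().get(1);
        System.out.println(node.getFullName());     // com.example.Node
        System.out.println(nextType.getFullName()); // com.example.Node -> recursion
    }
}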

11 votes : AvroUtils.java
Copyright Apache License 2.0
Author : apache
/**
 * Decorate the {@link Schema} for a record with additional {@link Field}s.
 * @param inputSchema: must be a {@link Record} schema.
 * @return the decorated Schema. Fields are appended to the inputSchema.
 */
public static Schema decorateRecordSchema(Schema inputSchema, @Nonnull List<Field> fieldList) {
    Preconditions.checkState(inputSchema.getType().equals(Type.RECORD));
    List<Field> outputFields = deepCopySchemaFields(inputSchema);
    List<Field> newOutputFields = Stream.concat(outputFields.stream(), fieldList.stream()).collect(Collectors.toList());
    Schema outputSchema = Schema.createRecord(inputSchema.getName(), inputSchema.getDoc(), inputSchema.getNamespace(), inputSchema.isError());
    outputSchema.setFields(newOutputFields);
    copyProperties(inputSchema, outputSchema);
    return outputSchema;
}

11 votes : AbstractRealtimeRecordReader.java
Copyright Apache License 2.0
Author : apache
/**
 * Generate a reader schema off the provided writeSchema, to just project out the provided columns.
 */
public static Schema generateProjectionSchema(Schema writeSchema, Map<String, Field> schemaFieldsMap, List<String> fieldNames) {
    /**
     * Avro & Presto field names seem to be case-sensitive (they support fields differing only in case), whereas
     * Hive/Impala/SparkSQL(default) are case-insensitive. Spark allows this to be configurable using
     * spark.sql.caseSensitive=true
     *
     * For a RT table setup with no delta-files (for a latest file-slice) -> we translate parquet schema to Avro Here
     * the field-name case is dependent on parquet schema. Hive (1.x/2.x/CDH) translate column projections to
     * lower-cases
     */
    List<Schema.Field> projectedFields = new ArrayList<>();
    for (String fn : fieldNames) {
        Schema.Field field = schemaFieldsMap.get(fn.toLowerCase());
        if (field == null) {
            throw new HoodieException("Field " + fn + " not found in log schema. Query cannot proceed! " + "Derived Schema Fields: " + new ArrayList<>(schemaFieldsMap.keySet()));
        } else {
            projectedFields.add(new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultValue()));
        }
    }
    Schema projectedSchema = Schema.createRecord(writeSchema.getName(), writeSchema.getDoc(), writeSchema.getNamespace(), writeSchema.isError());
    projectedSchema.setFields(projectedFields);
    return projectedSchema;
}

11 votes : AvroUtilsTest.java
Copyright Apache License 2.0
Author : apache
@Test
public void overrideSchemaNameAndNamespaceTest() {
    String inputName = "input_name";
    String inputNamespace = "input_namespace";
    String outputName = "output_name";
    String outputNamespace = "output_namespace";
    Schema inputSchema = SchemaBuilder.record(inputName).namespace(inputNamespace).fields().name("integer1").type().intBuilder().endInt().noDefault().endRecord();
    Map<String, String> namespaceOverrideMap = new HashMap<>();
    namespaceOverrideMap.put(inputNamespace, outputNamespace);
    Schema newSchema = AvroUtils.overrideNameAndNamespace(inputSchema, outputName, Optional.of(namespaceOverrideMap));
    Assert.assertEquals(newSchema.getName(), outputName);
    Assert.assertEquals(newSchema.getNamespace(), outputNamespace);
}

11 votes : HoodieAvroUtils.java
Copyright Apache License 2.0
Author : apache
/**
 * Adds the Hoodie metadata fields to the given schema.
 */
public static Schema addMetadataFields(Schema schema) {
    List<Schema.Field> parentFields = new ArrayList<>();
    Schema.Field commitTimeField = new Schema.Field(HoodieRecord.COMMIT_TIME_METADATA_FIELD, METADATA_FIELD_SCHEMA, "", NullNode.getInstance());
    Schema.Field commitSeqnoField = new Schema.Field(HoodieRecord.COMMIT_SEQNO_METADATA_FIELD, METADATA_FIELD_SCHEMA, "", NullNode.getInstance());
    Schema.Field recordKeyField = new Schema.Field(HoodieRecord.RECORD_KEY_METADATA_FIELD, METADATA_FIELD_SCHEMA, "", NullNode.getInstance());
    Schema.Field partitionPathField = new Schema.Field(HoodieRecord.PARTITION_PATH_METADATA_FIELD, METADATA_FIELD_SCHEMA, "", NullNode.getInstance());
    Schema.Field fileNameField = new Schema.Field(HoodieRecord.FILENAME_METADATA_FIELD, METADATA_FIELD_SCHEMA, "", NullNode.getInstance());
    parentFields.add(commitTimeField);
    parentFields.add(commitSeqnoField);
    parentFields.add(recordKeyField);
    parentFields.add(partitionPathField);
    parentFields.add(fileNameField);
    for (Schema.Field field : schema.getFields()) {
        if (!isMetadataField(field.name())) {
            Schema.Field newField = new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultValue());
            for (Map.Entry<String, JsonNode> prop : field.getJsonProps().entrySet()) {
                newField.addProp(prop.getKey(), prop.getValue());
            }
            parentFields.add(newField);
        }
    }
    Schema mergedSchema = Schema.createRecord(schema.getName(), schema.getDoc(), schema.getNamespace(), false);
    mergedSchema.setFields(parentFields);
    return mergedSchema;
}

10 votes : AvroHttpJoinConverter.java
Copyright Apache License 2.0
Author : apache
@Override
public Schema convertSchemaImpl(Schema inputSchema, WorkUnitState workUnitState) throws SchemaConversionException {
    if (inputSchema == null) {
        throw new SchemaConversionException("input schema is empty");
    }
    List<Schema.Field> fields = AvroUtils.deepCopySchemaFields(inputSchema);
    Schema.Field requestResponseField = new Schema.Field(HTTP_REQUEST_RESPONSE_FIELD, HttpRequestResponseRecord.getClassSchema(), "http output schema contains request url and return result", null);
    fields.add(requestResponseField);
    Schema combinedSchema = Schema.createRecord(inputSchema.getName(), inputSchema.getDoc() + " (Http request and response are contained)", inputSchema.getNamespace(), false);
    combinedSchema.setFields(fields);
    return combinedSchema;
}

10 votes : AvroUtils.java
Copyright Apache License 2.0
Author : apache
/**
 * Merge oldSchema and newSchema. Set a field's default value to null if the field exists in the old schema but not in the new schema.
 * @param oldSchema
 * @param newSchema
 * @return schema that contains all the fields in both old and new schema.
 */
public static Schema nullifyFieldsForSchemaMerge(Schema oldSchema, Schema newSchema) {
    if (oldSchema == null) {
        LOG.warn("No previous schema available, use the new schema instead.");
        return newSchema;
    }
    if (!(oldSchema.getType().equals(Type.RECORD) && newSchema.getType().equals(Type.RECORD))) {
        LOG.warn("Both previous schema and new schema need to be record type. Quit merging schema.");
        return newSchema;
    }
    List<Field> combinedFields = Lists.newArrayList();
    for (Field newFld : newSchema.getFields()) {
        combinedFields.add(new Field(newFld.name(), newFld.schema(), newFld.doc(), newFld.defaultValue()));
    }
    for (Field oldFld : oldSchema.getFields()) {
        if (newSchema.getField(oldFld.name()) == null) {
            List<Schema> union = Lists.newArrayList();
            Schema oldFldSchema = oldFld.schema();
            if (oldFldSchema.getType().equals(Type.UNION)) {
                union.add(Schema.create(Type.NULL));
                for (Schema itemInUnion : oldFldSchema.getTypes()) {
                    if (!itemInUnion.getType().equals(Type.NULL)) {
                        union.add(itemInUnion);
                    }
                }
                Schema newFldSchema = Schema.createUnion(union);
                combinedFields.add(new Field(oldFld.name(), newFldSchema, oldFld.doc(), oldFld.defaultValue()));
            } else {
                union.add(Schema.create(Type.NULL));
                union.add(oldFldSchema);
                Schema newFldSchema = Schema.createUnion(union);
                combinedFields.add(new Field(oldFld.name(), newFldSchema, oldFld.doc(), oldFld.defaultValue()));
            }
        }
    }
    Schema mergedSchema = Schema.createRecord(newSchema.getName(), newSchema.getDoc(), newSchema.getNamespace(), newSchema.isError());
    mergedSchema.setFields(combinedFields);
    return mergedSchema;
}
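
The union-building branches above show the standard way to make an existing field schema nullable. Isolated as a minimal sketch:

import java.util.Arrays;

import org.apache.avro.Schema;

public class NullableUnionDemo {

    public static void main(String[] args) {
        Schema original = Schema.create(Schema.Type.STRING);
        // NULL comes first so that a null default value is valid for the field:
        Schema nullable = Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), original));
        System.out.println(nullable); // ["null","string"]
    }
}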

10 votes : CamusSweeperAvroKeyJob.java
Copyright Apache License 2.0
Author : confluentinc
public Schema duplicateRecord(Schema record, Schema original) {
    List<Field> fields = new ArrayList<Schema.Field>();
    for (Field f : record.getFields()) {
        Schema fldSchema;
        if (original.getField(f.name()) != null) {
            fldSchema = original.getField(f.name()).schema();
        } else {
            fldSchema = f.schema();
        }
        fields.add(new Field(f.name(), fldSchema, f.doc(), f.defaultValue(), f.order()));
    }
    Schema newRecord = Schema.createRecord(original.getName(), record.getDoc(), original.getNamespace(), false);
    newRecord.setFields(fields);
    return newRecord;
}

10 votes : TestCopyCommandClusterChangedNameWithPartitioning.java
Copyright Apache License 2.0
Author : kite-sdk
@Override
public Schema getEvolvedSchema(Schema original) {
    List<Schema.Field> fields = Lists.newArrayList();
    for (Schema.Field field : original.getFields()) {
        fields.add(new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultValue()));
    }
    Schema evolved = Schema.createRecord("NewUser", original.getDoc(), original.getNamespace(), false);
    evolved.addAlias("User");
    evolved.setFields(fields);
    return evolved;
}

10 votes : NormalizeUtils.java
Copyright Apache License 2.0
Author : Talend
/**
 * Transform input schema to a new schema.
 *
 * The schema of the array field `pathToNormalize` will be replaced by the schema of its elements.
 */
public static Schema transformSchema(Schema inputSchema, String[] pathToNormalize, int pathIterator) {
    List<Schema.Field> fieldList = new ArrayList<>();
    for (Schema.Field field : inputSchema.getFields()) {
        Schema unwrappedSchema = getUnwrappedSchema(field);
        if ((pathIterator < pathToNormalize.length) && (field.name().equals(pathToNormalize[pathIterator])) && (unwrappedSchema.getType().equals(Schema.Type.ARRAY))) {
            fieldList.add(new Schema.Field(field.name(), unwrappedSchema.getElementType(), field.doc(), field.defaultVal()));
        } else if (unwrappedSchema.getType().equals(Schema.Type.RECORD)) {
            if ((pathIterator < pathToNormalize.length) && (field.name().equals(pathToNormalize[pathIterator]))) {
                Schema subElementSchema = transformSchema(unwrappedSchema, pathToNormalize, ++pathIterator);
                fieldList.add(new Schema.Field(field.name(), subElementSchema, null, null));
            } else {
                // if we are outside of the pathToNormalize, set the pathIterator at something that cannot be used
                // again
                Schema subElementSchema = transformSchema(unwrappedSchema, pathToNormalize, pathToNormalize.length);
                fieldList.add(new Schema.Field(field.name(), subElementSchema, null, null));
            }
        } else {
            // element add it directly
            fieldList.add(new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultVal()));
        }
    }
    return Schema.createRecord(inputSchema.getName(), inputSchema.getDoc(), inputSchema.getNamespace(), inputSchema.isError(), fieldList);
}

10 votes : SchemaGeneratorUtils.java
Copyright Apache License 2.0
Author : Talend
/**
 * Merge a KV-Schema into a single schema.
 *
 * For each level, the schema will contain the elements present in the keySchema first, then the ones present in
 * the valueSchema.
 *
 * @param keySchema an avro Schema
 * @param valueSchema an avro Schema
 * @return an avro Schema merging the two previous schema
 */
public static Schema mergeKeyValues(Schema keySchema, Schema valueSchema) {
    List<Schema.Field> fieldList = new ArrayList<>();
    for (Field field : keySchema.getFields()) {
        if (valueSchema.getField(field.name()) != null) {
            // element in both key and value => create sub element
            fieldList.add(new Field(field.name(), mergeKeyValues(field.schema(), valueSchema.getField(field.name()).schema()), "", ""));
        } else {
            // Element only present in the key
            fieldList.add(new Field(field.name(), field.schema(), field.doc(), field.defaultVal()));
        }
    }
    for (Field field : valueSchema.getFields()) {
        if (keySchema.getField(field.name()) == null) {
            // Element only present in the value
            fieldList.add(new Field(field.name(), field.schema(), field.doc(), field.defaultVal()));
        }
    }
    if (fieldList.size() > 0) {
        try {
            return Schema.createRecord(keySchema.getName(), keySchema.getDoc(), keySchema.getNamespace(), keySchema.isError(), fieldList);
        } catch (AvroRuntimeException e) {
            // this will be thrown when trying to get the name of an anonymous type
            return Schema.createRecord(fieldList);
        }
    } else {
        return AvroUtils.createEmptySchema();
    }
}

9 votes : CommonUtils.java
Copyright Apache License 2.0
Author : Talend
public static Schema newSchema(Schema metadataSchema, String newSchemaName, List<Schema.Field> moreFields, int insertPoint) {
    Schema newSchema = Schema.createRecord(newSchemaName, metadataSchema.getDoc(), metadataSchema.getNamespace(), metadataSchema.isError());
    List<Field> fields = metadataSchema.getFields();
    boolean removeDuplicatedNameField = (insertPoint == Integer.MAX_VALUE);
    List<Schema.Field> copyFieldList = cloneFieldsAndResetPosition(fields, removeDuplicatedNameField, moreFields);
    if (removeDuplicatedNameField) {
        copyFieldList.addAll(moreFields);
    } else {
        copyFieldList.addAll(insertPoint, moreFields);
    }
    newSchema.setFields(copyFieldList);
    for (Map.Entry<String, Object> entry : metadataSchema.getObjectProps().entrySet()) {
        newSchema.addProp(entry.getKey(), entry.getValue());
    }
    return newSchema;
}

9 votes : PigSchema2Avro.java
Copyright Apache License 2.0
Author : linkedin
/**
 * Validate that a Pig tuple is compatible with an Avro record. If the Avro schema
 * is not complete (with uncovered fields), then convert those fields using
 * methods in set 1.
 *
 * Notice that users can get rid of Pig tuple wrappers, e.g. an Avro schema
 * "int" is compatible with a Pig schema "T:(int)"
 */
protected static Schema validateAndConvertRecord(Schema avroSchema, ResourceFieldSchema[] pigFields) throws IOException {
    /* Get rid of Pig tuple wrappers. */
    if (!avroSchema.getType().equals(Schema.Type.RECORD)) {
        if (pigFields.length != 1)
            throw new IOException("Expect only one field in Pig tuple schema. Avro schema is " + avroSchema.getType());
        return validateAndConvert(avroSchema, pigFields[0]);
    }
    /* validate and convert a pig tuple with avro record */
    boolean isPartialSchema = AvroStorageUtils.isUDPartialRecordSchema(avroSchema);
    AvroStorageLog.details("isPartialSchema=" + isPartialSchema);
    String typeName = isPartialSchema ? getRecordName() : avroSchema.getName();
    Schema outSchema = Schema.createRecord(typeName, avroSchema.getDoc(), avroSchema.getNamespace(), false);
    List<Schema.Field> inFields = avroSchema.getFields();
    if (!isPartialSchema && inFields.size() != pigFields.length) {
        throw new IOException("Expect " + inFields.size() + " fields in pig schema." + " But there are " + pigFields.length);
    }
    List<Schema.Field> outFields = new ArrayList<Schema.Field>();
    for (int i = 0; i < pigFields.length; i++) {
        /* get user defined avro field schema */
        Field inputField = isPartialSchema ? AvroStorageUtils.getUDField(avroSchema, i) : inFields.get(i);
        /* get schema */
        Schema fieldSchema = null;
        if (inputField == null) {
            /* convert pig schema (nullable) */
            fieldSchema = convert(pigFields[i], true);
        } else if (inputField.schema() == null) {
            /* convert pig schema (not-null) */
            fieldSchema = convert(pigFields[i], false);
        } else {
            /* validate pigFields[i] with given avro schema */
            fieldSchema = validateAndConvert(inputField.schema(), pigFields[i]);
        }
        /* get field name of output */
        String outname = (isPartialSchema) ? pigFields[i].getName() : inputField.name();
        if (outname == null)
            // field name cannot be null
            outname = FIELD_NAME + "_" + i;
        /* get doc of output */
        String doc = (isPartialSchema) ? pigFields[i].getDescription() : inputField.doc();
        JsonNode defaultvalue = (inputField != null) ? inputField.defaultValue() : null;
        outFields.add(new Field(outname, fieldSchema, doc, defaultvalue));
    }
    outSchema.setFields(outFields);
    return outSchema;
}

9 votes : MarketoUtils.java
Copyright Apache License 2.0
Author : Talend
public static Schema newSchema(Schema metadataSchema, String newSchemaName, List<Schema.Field> moreFields) {
    Schema newSchema = Schema.createRecord(newSchemaName, metadataSchema.getDoc(), metadataSchema.getNamespace(), metadataSchema.isError());
    List<Schema.Field> copyFieldList = new ArrayList<>();
    for (Schema.Field se : metadataSchema.getFields()) {
        Schema.Field field = new Schema.Field(se.name(), se.schema(), se.doc(), se.defaultVal(), se.order());
        field.getObjectProps().putAll(se.getObjectProps());
        for (Map.Entry<String, Object> entry : se.getObjectProps().entrySet()) {
            field.addProp(entry.getKey(), entry.getValue());
        }
        copyFieldList.add(field);
    }
    copyFieldList.addAll(moreFields);
    newSchema.setFields(copyFieldList);
    for (Map.Entry<String, Object> entry : metadataSchema.getObjectProps().entrySet()) {
        newSchema.addProp(entry.getKey(), entry.getValue());
    }
    return newSchema;
}

9 votes : TypeConverterUtils.java
Copyright Apache License 2.0
Author : Talend
/**
 * Transform input schema to a new schema.
 * <p>
 * The schema of the array field `pathToConvert` will be modified to the schema of its fields.
 */
public static Schema convertSchema(Schema inputSchema, Stack<String> converterPath, TypeConverterOutputTypes outputType, String outputFormat) {
    List<Schema.Field> fieldList = new ArrayList<>();
    String currentStep = converterPath.pop();
    for (Schema.Field field : inputSchema.getFields()) {
        Schema unwrappedSchema = AvroUtils.unwrapIfNullable(field.schema());
        if (field.name().equals(currentStep)) {
            // We are on the path to be converted
            if (converterPath.size() == 0) {
                // We are on the exact element to convert
                Schema fieldSchema = TypeConverterUtils.getSchema(outputType, outputFormat);
                // Ensure the output is nullable if the input is nullable.
                if (AvroUtils.isNullable(field.schema()))
                    fieldSchema = AvroUtils.wrapAsNullable(fieldSchema);
                fieldList.add(new Schema.Field(field.name(), fieldSchema, field.doc(), field.defaultVal()));
            } else {
                // Going down in the hierarchy
                fieldList.add(new Schema.Field(field.name(), TypeConverterUtils.convertSchema(unwrappedSchema, converterPath, outputType, outputFormat), field.doc(), field.defaultVal()));
            }
        } else {
            // We are not on the path to convert, just recopying schema
            fieldList.add(new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultVal()));
        }
    }
    return Schema.createRecord(inputSchema.getName(), inputSchema.getDoc(), inputSchema.getNamespace(), inputSchema.isError(), fieldList);
}

9 votes : SchemaGeneratorUtils.java
Copyright Apache License 2.0
Author : Talend
/**
 * Create a new schema by extracting elements from the inputSchema that are *not* present in the keyPaths
 *
 * @param inputSchema a schema
 * @param keyPaths a list of paths to elements that will be considered as keys
 * @param currentPath the current subelement to extract
 * @return a new schema
 */
private static Schema extractValues(Schema inputSchema, List<String> keyPaths, String currentPath) {
    List<Schema.Field> fieldList = new ArrayList<>();
    for (Field field : inputSchema.getFields()) {
        String newPath = currentPath + "." + field.name();
        if (StringUtils.isEmpty(currentPath)) {
            newPath = currentPath + field.name();
        }
        if (keyPaths.contains(newPath)) {
        // Do nothing
        } else {
            Schema unwrappedSchema = getUnwrappedSchema(field);
            if (unwrappedSchema.getType().equals(Type.RECORD)) {
                Schema subElementSchema = extractValues(unwrappedSchema, keyPaths, newPath);
                if (subElementSchema != null) {
                    fieldList.add(new Field(field.name(), subElementSchema, "", ""));
                }
            } else {
                // element add it directly
                fieldList.add(new Field(field.name(), field.schema(), field.doc(), field.defaultVal()));
            }
        }
    }
    if (fieldList.size() > 0) {
        try {
            return Schema.createRecord("value_" + inputSchema.getName(), inputSchema.getDoc(), inputSchema.getNamespace(), inputSchema.isError(), fieldList);
        } catch (AvroRuntimeException e) {
            // this will be thrown when trying to get the name of an anonymous type
            return Schema.createRecord(fieldList);
        }
    } else {
        return null;
    }
}

9 votes : AvroCodec.java
Copyright Apache License 2.0
Author : viniciusccarvalho
public void init() {
    logger.info("Scanning avro schema resources on clreplacedpath");
    Schema.Parser parser = new Schema.Parser();
    try {
        Resource[] resources = resolver.getResources("classpath*:/**/*.avsc");
        logger.info("Found {} schemas on classpath", resources.length);
        for (Resource r : resources) {
            Schema s = parser.parse(r.getInputStream());
            if (!StringUtils.isEmpty(properties.getReaderSchema()) && properties.getReaderSchema().equals(s.getFullName())) {
                readerSchema = s;
            }
            logger.info("Resource {} parsed into schema {}.{}", r.getFilename(), s.getNamespace(), s.getName());
            Integer id = schemaRegistryClient.register(s);
            logger.info("Schema {} registered with id {}", s.getName(), id);
            localSchemaMap.put(s.getNamespace() + "." + s.getName(), id);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
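
The map key assembled by hand here, s.getNamespace() + "." + s.getName(), is what Schema#getFullName() already provides, with the added benefit of not producing a "null." prefix for schemas that have no namespace. A small sketch:

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

public class FullNameDemo {

    public static void main(String[] args) {
        Schema s = SchemaBuilder.record("User").namespace("com.example")
                .fields().requiredLong("id").endRecord();
        System.out.println(s.getNamespace() + "." + s.getName()); // com.example.User
        System.out.println(s.getFullName());                      // com.example.User
    }
}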

9 votes : GobblinTrackingEventFlattenFilterConverter.java
Copyright Apache License 2.0
Author : apache
@Override
public Schema convertSchema(Schema inputSchema, WorkUnitState workUnit) throws SchemaConversionException {
    Preconditions.checkArgument(AvroUtils.checkReaderWriterCompatibility(gobblinTrackingEventSchema, inputSchema, true));
    Schema outputSchema = Schema.createRecord(ConfigUtils.getString(config, NEW_SCHEMA_NAME, inputSchema.getName()), inputSchema.getDoc(), inputSchema.getNamespace(), inputSchema.isError());
    outputSchema.setFields(newFields);
    return outputSchema;
}

9 votes : AvroUtilsTest.java
Copyright Apache License 2.0
Author : apache
@Test
public void testSwitchNamespace() {
    String originalNamespace = "originalNamespace";
    String originalName = "originalName";
    String newNamespace = "newNamespace";
    Schema schema = SchemaBuilder.builder(originalNamespace).record(originalName).fields().requiredDouble("double").optionalFloat("float").endRecord();
    Map<String, String> map = Maps.newHashMap();
    map.put(originalNamespace, newNamespace);
    Schema newSchema = AvroUtils.switchNamespace(schema, map);
    Assert.assertEquals(newSchema.getNamespace(), newNamespace);
    Assert.assertEquals(newSchema.getName(), originalName);
    for (Schema.Field field : newSchema.getFields()) {
        Assert.assertEquals(field, schema.getField(field.name()));
    }
}

9 votes : HoodieAvroUtils.java
Copyright Apache License 2.0
Author : apache
/**
 * Add null fields to the passed-in schema. The caller is responsible for ensuring there are no duplicates. As different query
 * engines have varying constraints regarding the case-sensitivity of fields, it's best to let the caller
 * determine that.
 *
 * @param schema the passed-in schema
 * @param newFieldNames null field names to be added
 */
public static Schema appendNullSchemaFields(Schema schema, List<String> newFieldNames) {
    List<Field> newFields = schema.getFields().stream().map(field -> {
        return new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultValue());
    }).collect(Collectors.toList());
    for (String newField : newFieldNames) {
        newFields.add(new Schema.Field(newField, METADATA_FIELD_SCHEMA, "", NullNode.getInstance()));
    }
    Schema newSchema = Schema.createRecord(schema.getName(), schema.getDoc(), schema.getNamespace(), schema.isError());
    newSchema.setFields(newFields);
    return newSchema;
}

9 votes : AvroTypeUtil.java
Copyright Apache License 2.0
Author : apache
/**
 * Converts an Avro Schema to a RecordSchema
 *
 * @param avroSchema the Avro Schema to convert
 * @param schemaText the textual representation of the schema
 * @param schemaId the identifier of the schema
 * @return the Corresponding Record Schema
 */
public static RecordSchema createSchema(final Schema avroSchema, final String schemaText, final SchemaIdentifier schemaId) {
    if (avroSchema == null) {
        throw new IllegalArgumentException("Avro Schema cannot be null");
    }
    final String schemaFullName = avroSchema.getNamespace() + "." + avroSchema.getName();
    final SimpleRecordSchema recordSchema = schemaText == null ? new SimpleRecordSchema(schemaId) : new SimpleRecordSchema(schemaText, AVRO_SCHEMA_FORMAT, schemaId);
    recordSchema.setSchemaName(avroSchema.getName());
    recordSchema.setSchemaNamespace(avroSchema.getNamespace());
    final DataType recordSchemaType = RecordFieldType.RECORD.getRecordDataType(recordSchema);
    final Map<String, DataType> knownRecords = new HashMap<>();
    knownRecords.put(schemaFullName, recordSchemaType);
    final List<RecordField> recordFields = new ArrayList<>(avroSchema.getFields().size());
    for (final Field field : avroSchema.getFields()) {
        final String fieldName = field.name();
        final Schema fieldSchema = field.schema();
        final DataType dataType = AvroTypeUtil.determineDataType(fieldSchema, knownRecords);
        final boolean nullable = isNullable(fieldSchema);
        addFieldToList(recordFields, field, fieldName, fieldSchema, dataType, nullable);
    }
    recordSchema.setFields(recordFields);
    return recordSchema;
}

9 votes : TestReflectLogicalTypes.java
Copyright Apache License 2.0
Author : apache
@Test
public void testDecimalFixed() throws IOException {
    Schema schema = REFLECT.getSchema(DecimalRecordFixed.class);
    Assert.assertEquals("Should have the correct namespace", "org.apache.parquet.avro.TestReflectLogicalTypes", schema.getNamespace());
    Assert.assertEquals("Should have the correct record name", "DecimalRecordFixed", schema.getName());
    Assert.assertEquals("Should have the correct logical type", LogicalTypes.decimal(9, 2), LogicalTypes.fromSchema(schema.getField("decimal").schema()));
    DecimalRecordFixed record = new DecimalRecordFixed();
    record.decimal = new BigDecimal("3.14");
    File test = write(REFLECT, schema, record);
    Assert.assertEquals("Should match the decimal after round trip", Arrays.asList(record), read(REFLECT, schema, test));
}

9 votes : TestReflectLogicalTypes.java
Copyright Apache License 2.0
Author : apache
@Test
public void testDecimalBytes() throws IOException {
    Schema schema = REFLECT.getSchema(DecimalRecordBytes.class);
    Assert.assertEquals("Should have the correct namespace", "org.apache.parquet.avro.TestReflectLogicalTypes", schema.getNamespace());
    Assert.assertEquals("Should have the correct record name", "DecimalRecordBytes", schema.getName());
    Assert.assertEquals("Should have the correct logical type", LogicalTypes.decimal(9, 2), LogicalTypes.fromSchema(schema.getField("decimal").schema()));
    DecimalRecordBytes record = new DecimalRecordBytes();
    record.decimal = new BigDecimal("3.14");
    File test = write(REFLECT, schema, record);
    Assert.assertEquals("Should match the decimal after round trip", Arrays.asList(record), read(REFLECT, schema, test));
}
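
In both tests the expected namespace is the fully qualified name of the enclosing test class, because DecimalRecordFixed and DecimalRecordBytes are nested classes: Avro's reflection support folds the enclosing class into the namespace so that the simple name stays clean. A sketch of that behavior (package and class names are illustrative):

package com.example;

import org.apache.avro.Schema;
import org.apache.avro.reflect.ReflectData;

public class ReflectNamespaceDemo {

    public static class Inner {
        public int x;
    }

    public static void main(String[] args) {
        Schema s = ReflectData.get().getSchema(Inner.class);
        System.out.println(s.getNamespace()); // com.example.ReflectNamespaceDemo
        System.out.println(s.getName());      // Inner
    }
}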

8 votes : EnvelopePayloadConverter.java
Copyright Apache License 2.0
Author : apache
@Override
public Schema convertSchema(Schema inputSchema, WorkUnitState workUnit) throws SchemaConversionException {
    List<Field> outputSchemaFields = new ArrayList<>();
    for (Field field : inputSchema.getFields()) {
        outputSchemaFields.add(convertFieldSchema(inputSchema, field, workUnit));
    }
    Schema outputSchema = Schema.createRecord(inputSchema.getName(), inputSchema.getDoc(), inputSchema.getNamespace(), inputSchema.isError());
    outputSchema.setFields(outputSchemaFields);
    return outputSchema;
}

8 votes : AvroRelConverter.java
Copyright Apache License 2.0
Author : apache
private static GenericRecord convertToGenericRecord(SamzaSqlRelRecord relRecord, Schema schema) {
    GenericRecord record = new GenericData.Record(schema);
    List<String> fieldNames = relRecord.getFieldNames();
    List<Object> values = relRecord.getFieldValues();
    for (int index = 0; index < fieldNames.size(); index++) {
        if (!fieldNames.get(index).equalsIgnoreCase(SamzaSqlRelMessage.KEY_NAME)) {
            String fieldName = fieldNames.get(index);
            /**
             * It is possible that the destination Avro schema doesn't have all the fields that are projected from the
             * SQL. This is especially possible in SQL statements like
             *        insert into kafka.outputTopic select id, company from profile
             * where company is an Avro record in itself whose schema can evolve. When this happens, we end up with
             * fields in the SamzaSqlRelRecord for the company field that have no equivalent fields in the outputTopic's schema
             * for company. To support this scenario, where the input and output schemas can evolve at their own cadence,
             * we ignore the fields which have no corresponding schema in the output topic.
             */
            if (schema.getField(fieldName) == null) {
                LOG.debug("Schema with Name {} and Namespace {} doesn't contain the fieldName {}, Skipping it.", schema.getName(), schema.getNamespace(), fieldName);
                continue;
            }
            Object relObj = values.get(index);
            Schema fieldSchema = schema.getField(fieldName).schema();
            record.put(fieldName, convertToAvroObject(relObj, getNonNullUnionSchema(fieldSchema)));
        }
    }
    return record;
}

8 votes : Util.java
Copyright Apache License 2.0
Author : linkedin
public static Schema removeUnion(Schema schema) {
    if (schema.getType() == Schema.Type.UNION) {
        List<Schema> schemas = schema.getTypes();
        for (Schema s : schemas) {
            if (s.getType() != Schema.Type.NULL) {
                return removeUnion(s);
            }
        }
    } else if (schema.getType() == Schema.Type.ARRAY) {
        Schema newSchema = Schema.createArray(removeUnion(schema.getElementType()));
        return newSchema;
    } else if (schema.getType() == Schema.Type.RECORD) {
        List<Schema.Field> fields = schema.getFields();
        List<Schema.Field> newFields = new LinkedList<Schema.Field>();
        for (Schema.Field f : fields) {
            newFields.add(new Schema.Field(f.name(), removeUnion(f.schema()), f.doc(), null));
        }
        Schema newSchema = Schema.createRecord(schema.getName(), schema.getDoc(), schema.getNamespace(), false);
        newSchema.setFields(newFields);
        return newSchema;
    }
    return schema;
}
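
For the common case of a nullable union, the same unwrapping can be written with plain Avro calls; a minimal sketch of the non-null-branch selection the helper above performs:

import java.util.Arrays;

import org.apache.avro.Schema;

public class UnwrapNullableDemo {

    public static void main(String[] args) {
        Schema nullable = Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), Schema.create(Schema.Type.STRING)));
        // Pick the first non-null branch, as removeUnion does for unions:
        Schema unwrapped = nullable.getTypes().stream()
                .filter(s -> s.getType() != Schema.Type.NULL)
                .findFirst()
                .orElse(nullable);
        System.out.println(unwrapped); // "string"
    }
}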

8 votes : AvroStorageSchemaConversionUtilities.java
Copyright Apache License 2.0
Author : sigmoidanalytics
/**
 * Takes an Avro Schema and a Pig RequiredFieldList and returns a new schema
 * with only the required fields, or null if the function can't extract only
 * those fields. Useful for push down projections.
 * @param oldSchema The avro schema from which to extract the schema
 * @param rfl List of required fields
 * @return the new schema
 */
public static Schema newSchemaFromRequiredFieldList(final Schema oldSchema, final List<RequiredField> rfl) {
    List<Schema.Field> fields = Lists.newArrayList();
    for (RequiredField rf : rfl) {
        try {
            Schema.Field f = oldSchema.getField(rf.getAlias());
            if (f == null) {
                return null;
            }
            try {
                if (getPigType(f.schema()) != rf.getType()) {
                    return null;
                }
            } catch (ExecException e) {
                Log.warn("ExecException caught in newSchemaFromRequiredFieldList", e);
                return null;
            }
            if (rf.getSubFields() == null) {
                fields.add(new Schema.Field(f.name(), f.schema(), f.doc(), f.defaultValue()));
            } else {
                Schema innerSchema = newSchemaFromRequiredFieldList(f.schema(), rf.getSubFields());
                if (innerSchema == null) {
                    return null;
                } else {
                    fields.add(new Schema.Field(f.name(), innerSchema, f.doc(), f.defaultValue()));
                }
            }
        } catch (AvroRuntimeException e) {
            return oldSchema;
        }
    }
    Schema newSchema = Schema.createRecord(oldSchema.getName(), "subset of fields from " + oldSchema.getName() + "; " + oldSchema.getDoc(), oldSchema.getNamespace(), false);
    newSchema.setFields(fields);
    return newSchema;
}
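
The core of the projection, stripped of the Pig RequiredField plumbing, is just re-creating the wanted fields into a fresh record schema that keeps the original name and namespace. A plain-Avro sketch of that idea (schema and field names are invented):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.avro.Schema;

public class ProjectionDemo {
    public static void main(String[] args) {
        Schema wide = Schema.createRecord("User", null, "com.example", false);
        wide.setFields(Arrays.asList(
                new Schema.Field("id", Schema.create(Schema.Type.LONG), null, (Object) null),
                new Schema.Field("name", Schema.create(Schema.Type.STRING), null, (Object) null),
                new Schema.Field("email", Schema.create(Schema.Type.STRING), null, (Object) null)));
        // keep only id and name, re-creating each Field because a Field belongs to exactly one schema
        List<Schema.Field> projected = new ArrayList<>();
        for (String alias : Arrays.asList("id", "name")) {
            Schema.Field f = wide.getField(alias);
            projected.add(new Schema.Field(f.name(), f.schema(), f.doc(), (Object) null));
        }
        Schema narrow = Schema.createRecord(wide.getName(),
                "subset of fields from " + wide.getName(), wide.getNamespace(), false);
        narrow.setFields(projected);
        System.out.println(narrow.toString(true));
    }
}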

8 View Complete Implementation : TAzureStorageOutputTableProperties.java
Copyright Apache License 2.0
Author : Talend
private Schema newSchema(Schema metadataSchema, String newSchemaName, List<Schema.Field> moreFields) {
    Schema newSchema = Schema.createRecord(newSchemaName, metadataSchema.getDoc(), metadataSchema.getNamespace(), metadataSchema.isError());
    List<Schema.Field> copyFieldList = new ArrayList<>();
    for (Schema.Field se : metadataSchema.getFields()) {
        Schema.Field field = new Schema.Field(se.name(), se.schema(), se.doc(), se.defaultVal(), se.order());
        for (Map.Entry<String, Object> entry : se.getObjectProps().entrySet()) {
            field.addProp(entry.getKey(), entry.getValue());
        }
        copyFieldList.add(field);
    }
    copyFieldList.addAll(moreFields);
    newSchema.setFields(copyFieldList);
    for (Map.Entry<String, Object> entry : metadataSchema.getObjectProps().entrySet()) {
        newSchema.addProp(entry.getKey(), entry.getValue());
    }
    return newSchema;
}

8 View Complete Implementation : TFileInputDelimitedProperties.java
Copyright Apache License 2.0
Author : Talend
private Schema newSchema(Schema metadataSchema, String newSchemaName, List<Schema.Field> moreFields) {
    Schema newSchema = Schema.createRecord(newSchemaName, metadataSchema.getDoc(), metadataSchema.getNamespace(), metadataSchema.isError());
    // TODO duplicate with salesforce, make it to a common one?
    List<Schema.Field> copyFieldList = new ArrayList<>();
    for (Schema.Field se : metadataSchema.getFields()) {
        Schema.Field field = new Schema.Field(se.name(), se.schema(), se.doc(), se.defaultVal(), se.order());
        for (Map.Entry<String, Object> entry : se.getObjectProps().entrySet()) {
            field.addProp(entry.getKey(), entry.getValue());
        }
        copyFieldList.add(field);
    }
    copyFieldList.addAll(moreFields);
    newSchema.setFields(copyFieldList);
    for (Map.Entry<String, Object> entry : metadataSchema.getObjectProps().entrySet()) {
        newSchema.addProp(entry.getKey(), entry.getValue());
    }
    return newSchema;
}

8 View Complete Implementation : TFilterRowProperties.java
Copyright Apache License 2.0
Author : Talend
private Schema newSchema(Schema metadataSchema, String newSchemaName, List<Schema.Field> moreFields) {
    Schema newSchema = Schema.createRecord(newSchemaName, metadataSchema.getDoc(), metadataSchema.getNamespace(), metadataSchema.isError());
    // TODO duplicate with salesforce, make it to a common one?
    List<Schema.Field> copyFieldList = new ArrayList<>();
    for (Schema.Field se : metadataSchema.getFields()) {
        Schema.Field field = new Schema.Field(se.name(), se.schema(), se.doc(), se.defaultVal(), se.order());
        for (Map.Entry<String, Object> entry : se.getObjectProps().entrySet()) {
            field.addProp(entry.getKey(), entry.getValue());
        }
        copyFieldList.add(field);
    }
    copyFieldList.addAll(moreFields);
    newSchema.setFields(copyFieldList);
    for (Map.Entry<String, Object> entry : metadataSchema.getObjectProps().entrySet()) {
        newSchema.addProp(entry.getKey(), entry.getValue());
    }
    return newSchema;
}
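
Note that Schema.Field properties can only be written through addProp: depending on the Avro release, getObjectProps() returns either a detached copy or an unmodifiable view, so mutating the returned map never updates the field itself. A standalone version of the copy idiom shared by the three newSchema variants above (class and method names are illustrative):

import java.util.Map;
import org.apache.avro.Schema;

public class FieldCopyUtil {
    // copy a field together with its custom properties; getObjectProps() returns
    // a copy, so mutating it would not touch the field, and addProp is required
    public static Schema.Field copyFieldWithProps(Schema.Field se) {
        Schema.Field field = new Schema.Field(se.name(), se.schema(), se.doc(), se.defaultVal(), se.order());
        for (Map.Entry<String, Object> entry : se.getObjectProps().entrySet()) {
            field.addProp(entry.getKey(), entry.getValue());
        }
        return field;
    }
}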

8 View Complete Implementation : MarketoUtils.java
Copyright Apache License 2.0
Author : Talend
/**
 * Replace selected fields in the provided schema with new field definitions
 *
 * @param schema original schema
 * @param changedFields fields to change
 * @return modified schema
 */
public static Schema modifySchemaFields(Schema schema, List<Schema.Field> changedFields) {
    Schema newSchema = Schema.createRecord(schema.getName(), schema.getDoc(), schema.getNamespace(), schema.isError());
    List<Schema.Field> fields = new ArrayList<>();
    for (Schema.Field se : schema.getFields()) {
        Schema.Field field = null;
        for (Field cf : changedFields) {
            if (cf.name().equals(se.name())) {
                field = cf;
                break;
            }
        }
        if (field == null) {
            field = new Schema.Field(se.name(), se.schema(), se.doc(), se.defaultVal(), se.order());
            for (Map.Entry<String, Object> entry : se.getObjectProps().entrySet()) {
                field.addProp(entry.getKey(), entry.getValue());
            }
        }
        fields.add(field);
    }
    newSchema.setFields(fields);
    for (Map.Entry<String, Object> entry : schema.getObjectProps().entrySet()) {
        newSchema.addProp(entry.getKey(), entry.getValue());
    }
    return newSchema;
}
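
A usage sketch for the helper above, swapping a single field to a nullable union while every other field (and the schema's name, namespace, and props) is carried over; the schema and field names are invented, and it assumes MarketoUtils is on the classpath:

import java.util.Arrays;
import java.util.Collections;
import org.apache.avro.Schema;

public class ModifyFieldsDemo {
    public static void main(String[] args) {
        Schema original = Schema.createRecord("Lead", null, "com.example", false);
        original.setFields(Arrays.asList(
                new Schema.Field("id", Schema.create(Schema.Type.LONG), null, (Object) null),
                new Schema.Field("email", Schema.create(Schema.Type.STRING), null, (Object) null)));
        // redefine "email" as ["null","string"]; matching is by field name
        Schema.Field nullableEmail = new Schema.Field("email",
                Schema.createUnion(Arrays.asList(
                        Schema.create(Schema.Type.NULL),
                        Schema.create(Schema.Type.STRING))),
                null, (Object) null);
        Schema patched = MarketoUtils.modifySchemaFields(original, Collections.singletonList(nullableEmail));
        System.out.println(patched.getField("email").schema()); // ["null","string"]
    }
}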

7 View Complete Implementation : AbstractRealtimeRecordReader.java
Copyright Apache License 2.0
Author : apache
private Schema constructHiveOrderedSchema(Schema writerSchema, Map<String, Field> schemaFieldsMap) {
    // Get all column names of hive table
    String hiveColumnString = jobConf.get(hive_metastoreConstants.META_TABLE_COLUMNS);
    String[] hiveColumns = hiveColumnString.split(",");
    List<Field> hiveSchemaFields = new ArrayList<>();
    for (String columnName : hiveColumns) {
        Field field = schemaFieldsMap.get(columnName.toLowerCase());
        if (field != null) {
            hiveSchemaFields.add(new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultValue()));
        } else {
            // Hive has some extra virtual columns like BLOCK__OFFSET__INSIDE__FILE which do not exist in the table schema.
            // They will get skipped as they won't be found in the original schema.
            LOG.debug("Skipping Hive Column => " + columnName);
        }
    }
    Schema hiveSchema = Schema.createRecord(writerSchema.getName(), writerSchema.getDoc(), writerSchema.getNamespace(), writerSchema.isError());
    hiveSchema.setFields(hiveSchemaFields);
    return hiveSchema;
}

7 View Complete Implementation : JdbcAvroRecordTest.java
Copyright Apache License 2.0
Author : spotify
@Test
public void shouldCreateSchema() throws ClassNotFoundException, SQLException {
    int fieldCount = 12;
    Schema actual = JdbcAvroSchema.createSchemaByReadingOneRow(DbTestHelper.createConnection(CONNECTION_URL), "COFFEES", "dbeam_generated", "Generate schema from JDBC ResultSet from COFFEES jdbc:h2:mem:test", false);
    Assert.assertNotNull(actual);
    Assert.assertEquals("dbeam_generated", actual.getNamespace());
    Assert.assertEquals("COFFEES", actual.getProp("tableName"));
    Assert.assertEquals("jdbc:h2:mem:test", actual.getProp("connectionUrl"));
    Assert.assertEquals("Generate schema from JDBC ResultSet from COFFEES jdbc:h2:mem:test", actual.getDoc());
    Assert.assertEquals(fieldCount, actual.getFields().size());
    Assert.assertEquals(Lists.newArrayList("COF_NAME", "SUP_ID", "PRICE", "TEMPERATURE", "SIZE", "IS_ARABIC", "SALES", "TOTAL", "CREATED", "UPDATED", "UID", "ROWNUM"), actual.getFields().stream().map(Schema.Field::name).collect(Collectors.toList()));
    for (Schema.Field f : actual.getFields()) {
        Assert.assertEquals(Schema.Type.UNION, f.schema().getType());
        Assert.assertEquals(2, f.schema().getTypes().size());
        Assert.assertEquals(Schema.Type.NULL, f.schema().getTypes().get(0).getType());
    }
    Assert.assertEquals(Schema.Type.STRING, actual.getField("COF_NAME").schema().getTypes().get(1).getType());
    Assert.assertEquals(Schema.Type.INT, actual.getField("SUP_ID").schema().getTypes().get(1).getType());
    Assert.assertEquals(Schema.Type.STRING, actual.getField("PRICE").schema().getTypes().get(1).getType());
    Assert.assertEquals(Schema.Type.FLOAT, actual.getField("TEMPERATURE").schema().getTypes().get(1).getType());
    Assert.assertEquals(Schema.Type.DOUBLE, actual.getField("SIZE").schema().getTypes().get(1).getType());
    Assert.assertEquals(Schema.Type.BOOLEAN, actual.getField("IS_ARABIC").schema().getTypes().get(1).getType());
    Assert.assertEquals(Schema.Type.INT, actual.getField("SALES").schema().getTypes().get(1).getType());
    Assert.assertEquals(Schema.Type.LONG, actual.getField("TOTAL").schema().getTypes().get(1).getType());
    Assert.assertEquals(Schema.Type.LONG, actual.getField("CREATED").schema().getTypes().get(1).getType());
    Assert.assertEquals(Schema.Type.LONG, actual.getField("UPDATED").schema().getTypes().get(1).getType());
    Assert.assertEquals(Schema.Type.BYTES, actual.getField("UID").schema().getTypes().get(1).getType());
    Assert.assertEquals(Schema.Type.LONG, actual.getField("ROWNUM").schema().getTypes().get(1).getType());
    Assert.assertNull(actual.getField("UPDATED").schema().getTypes().get(1).getProp("logicalType"));
}

7 View Complete Implementation : TestUtils.java
Copyright Apache License 2.0
Author : Talend
public static Schema makeRecordSchema(Schema sourceSchema, Collection<String> targetFieldNames) {
    Schema targetSchema = Schema.createRecord(sourceSchema.getName(), sourceSchema.getDoc(), sourceSchema.getNamespace(), sourceSchema.isError());
    List<Schema.Field> copyFieldList = new ArrayList<>();
    for (String targetFieldName : targetFieldNames) {
        Schema.Field field = NetSuiteDatasetRuntimeImpl.getNsFieldByName(sourceSchema, targetFieldName);
        if (field != null) {
            Schema.Field targetField = NetSuiteDatasetRuntimeImpl.copyField(field);
            copyFieldList.add(targetField);
        }
    }
    targetSchema.setFields(copyFieldList);
    for (Map.Entry<String, Object> entry : sourceSchema.getObjectProps().entrySet()) {
        targetSchema.addProp(entry.getKey(), entry.getValue());
    }
    return targetSchema;
}

6 View Complete Implementation : GoraCompiler.java
Copyright Apache License 2.0
Author : apache
private static Schema getRecordSchemaWithDirtySupport(Schema originalSchema, Map<Schema, Schema> queue) throws IOException {
    if (originalSchema.getType() != Type.RECORD) {
        throw new IOException("Gora only supports record schemas.");
    }
    List<Field> originalFields = originalSchema.getFields();
    /* make sure the schema doesn't contain the field __g__dirty */
    for (Field field : originalFields) {
        if (GORA_RESERVED_NAMES.contains(field.name())) {
            throw new IOException("Gora schemas cannot contain the field name " + field.name());
        }
    }
    Schema newSchema = Schema.createRecord(originalSchema.getName(), originalSchema.getDoc(), originalSchema.getNamespace(), originalSchema.isError());
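    // register the copy before recursing so that self-referencing (recursive) record schemas resolve to the new schema instead of looping forever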
    queue.put(originalSchema, newSchema);
    List<Field> newFields = new ArrayList<>();
    byte[] defaultDirtyBytesValue = new byte[getNumberOfBytesNeededForDirtyBits(originalSchema)];
    Arrays.fill(defaultDirtyBytesValue, (byte) 0);
    JsonNode defaultDirtyJsonValue = JsonNodeFactory.instance.binaryNode(defaultDirtyBytesValue);
    Field dirtyBits = new Field(DIRTY_BYTES_FIELD_NAME, Schema.create(Type.BYTES), "Bytes used to represent whether or not a field is dirty.", defaultDirtyJsonValue);
    newFields.add(dirtyBits);
    for (Field originalField : originalFields) {
        // recursively add dirty support
        Field newField = new Field(originalField.name(), getSchemaWithDirtySupport(originalField.schema(), queue), originalField.doc(), originalField.defaultValue(), originalField.order());
        newFields.add(newField);
    }
    newSchema.setFields(newFields);
    return newSchema;
}

6 View Complete Implementation : TestReflectLogicalTypes.java
Copyright Apache License 2.0
Author : apache
@Test
public void testPairRecord() throws IOException {
    ReflectData model = new ReflectData();
    model.addLogicalTypeConversion(new Conversion<Pair>() {

        @Override
        public Class<Pair> getConvertedType() {
            return Pair.class;
        }

        @Override
        public String getLogicalTypeName() {
            return "pair";
        }

        @Override
        public Pair fromRecord(IndexedRecord value, Schema schema, LogicalType type) {
            return Pair.of(value.get(0), value.get(1));
        }

        @Override
        public IndexedRecord toRecord(Pair value, Schema schema, LogicalType type) {
            GenericData.Record record = new GenericData.Record(schema);
            record.put(0, value.first);
            record.put(1, value.second);
            return record;
        }
    });
    LogicalTypes.register("pair", new LogicalTypes.LogicalTypeFactory() {

        private final LogicalType PAIR = new LogicalType("pair");

        @Override
        public LogicalType fromSchema(Schema schema) {
            return PAIR;
        }
    });
    Schema schema = model.getSchema(PairRecord.class);
    replacedert.replacedertEquals("Should have the correct record name", "org.apache.parquet.avro.TestReflectLogicalTypes", schema.getNamespace());
    replacedert.replacedertEquals("Should have the correct record name", "PairRecord", schema.getName());
    replacedert.replacedertEquals("Should have the correct logical type", "pair", LogicalTypes.fromSchema(schema.getField("pair").schema()).getName());
    PairRecord record = new PairRecord();
    record.pair = Pair.of(34L, 35L);
    List<PairRecord> expected = new ArrayList<PairRecord>();
    expected.add(record);
    File test = write(model, schema, record);
    Pair<Long, Long> actual = AvroTestUtil.<PairRecord>read(model, schema, test).get(0).pair;
    replacedert.replacedertEquals("Data should match after serialization round-trip", 34L, (long) actual.first);
    replacedert.replacedertEquals("Data should match after serialization round-trip", 35L, (long) actual.second);
}
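
The namespace assertion above works because ReflectData derives a record's namespace from the Java package, folding the enclosing class into it for nested types such as PairRecord. A minimal check of the package convention for a top-level class (the package and class names here are hypothetical):

package com.example.demo; // hypothetical package, chosen for the demo

import org.apache.avro.Schema;
import org.apache.avro.reflect.ReflectData;

public class NamespaceCheck {

    private int x; // reflected into a single int field

    public static void main(String[] args) {
        Schema s = ReflectData.get().getSchema(NamespaceCheck.class);
        System.out.println(s.getNamespace()); // com.example.demo
        System.out.println(s.getName());      // NamespaceCheck
    }
}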