Avro & Java: Record Parsing

This tutorial will guide you through how to convert JSON to Avro and then back to JSON. I suggest you first read through the documentation on Avro to familiarize yourself with it. This tutorial assumes you have a Maven project already set up and a resources folder.

POM:

Add Avro Dependency

 

 

 

 

Add Jackson Dependency

Avro Schema File:

Next you need to create the avro schema file in your resources folder. Name the file “schema.avsc”. The extension avsc is the Avro schema extension.

  1. {
  2. "namespace": "test.avro",
  3. "type": "record",
  4. "name": "MY_NAME",
  5. "fields": [
  6. {"name": "name_1", "type": "int"},
  7. {"name": "name_2", "type": {"type": "array", "items": "float"}},
  8. {"name": "name_3", "type": "float"}
  9. ]
  10. }

Json Record to Validate:

Next you need to create a JSON file that conforms to the schema you just made. Name the file “record.json” and put it in your resources folder. The contents can be whatever you want as long as they conform to the schema above.

  1. { "name_1": 234, "name_2": [23.34,654.98], "name_3": 234.7}

It’s Avro Time:

Imports:

  1. import java.io.ByteArrayOutputStream;
  2. import java.io.DataInputStream;
  3. import java.io.File;
  4. import java.io.IOException;
  5. import java.io.InputStream;
  6.  
  7. import org.apache.avro.Schema;
  8. import org.apache.avro.generic.GenericData;
  9. import org.apache.avro.generic.GenericDatumReader;
  10. import org.apache.avro.generic.GenericDatumWriter;
  11. import org.apache.avro.io.DatumReader;
  12. import org.apache.avro.io.Decoder;
  13. import org.apache.avro.io.DecoderFactory;
  14. import org.apache.avro.io.Encoder;
  15. import org.apache.avro.io.EncoderFactory;
  16.  
  17. import com.fasterxml.jackson.databind.JsonNode;
  18. import com.fasterxml.jackson.databind.ObjectMapper;

Conversion to Avro and Back:

  1. private void run() throws IOException {
  2. //Get the schema and json record from resources
  3. final ClassLoader loader = getClass().getClassLoader();
  4. final File schemaFile = new File(loader.getResource("schema.avsc").getFile());
  5. final InputStream record = loader.getResourceAsStream("record.json");
  6. //Create avro schema
  7. final Schema schema = new Schema.Parser().parse(schemaFile);
  8.  
  9. //Encode to avro
  10. final byte[] avro = encodeToAvro(schema, record);
  11.  
  12. //Decode back to json
  13. final JsonNode node = decodeToJson(schema, avro);
  14.  
  15. System.out.println(node);
  16. System.out.println("done");
  17. }
  18.  
  19. /**
  20. * Encode json to avro
  21. *
  22. * @param schema the schema the avro pertains to
  23. * @param record the data to convert to avro
  24. * @return the avro bytes
  25. * @throws IOException if decoding fails
  26. */
  27. private byte[] encodeToAvro(Schema schema, InputStream record) throws IOException {
  28. final DatumReader<GenericData.Record> reader = new GenericDatumReader<>(schema);
  29. final DataInputStream din = new DataInputStream(record);
  30. final Decoder decoder = new DecoderFactory().jsonDecoder(schema, din);
  31. final Object datum = reader.read(null, decoder);
  32. final GenericDatumWriter<Object> writer = new GenericDatumWriter<>(schema);
  33. final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  34. final Encoder encoder = new EncoderFactory().binaryEncoder(outputStream, null);
  35. writer.write(datum, encoder);
  36. encoder.flush();
  37.  
  38. return outputStream.toByteArray();
  39. }
  40.  
  41. /**
  42. * Decode avro back to json.
  43. *
  44. * @param schema the schema the avro pertains to
  45. * @param avro the avro bytes
  46. * @return the json
  47. * @throws IOException if jackson fails
  48. */
  49. private JsonNode decodeToJson(Schema schema, byte[] avro) throws IOException {
  50. final ObjectMapper mapper = new ObjectMapper();
  51. final DatumReader<GenericData.Record> reader = new GenericDatumReader<>(schema);
  52. final Decoder decoder = new DecoderFactory().binaryDecoder(avro, null);
  53. final JsonNode node = mapper.readTree(reader.read(null, decoder).toString());
  54.  
  55. return node;
  56. }

Avro & Python: How to Schema, Write, Read

I have been experimenting with Apache Avro and Python. Below is what I have learned thus far.

Pip Install

At the time of this writing I am using 1.8.2.

  1. pip install avro-python3

Schema

There are many different ways to work with a schema definition; Avro supports both primitive and complex types. You can find much more detail in the official Avro schema specification.

  1. import json
  2. import avro.schema
  3.  
  4. my_schema = avro.schema.Parse(json.dumps(
  5. {
  6. 'namespace': 'test.avro',
  7. 'type': 'record',
  8. 'name': 'MY_NAME',
  9. 'fields': [
  10. {'name': 'name_1', 'type': 'int'},
  11. {'name': 'name_2', 'type': {'type': 'array', 'items': 'float'}},
  12. {'name': 'name_3', 'type': 'float'},
  13. ]
  14. }))

Method 1

Write

  1. from avro.datafile import DataFileWriter
  2. from avro.io import DatumWriter
  3. import io
  4.  
  5. #write binary
  6. file = open(filename, 'wb')
  7.  
  8. datum_writer = DatumWriter()
  9. fwriter = DataFileWriter(file, datum_writer, my_schema)
  10. fwriter.append({'name_1': 645645, 'name_2': [5.6,34.7], 'name_3': 644.5645})
  11. fwriter.close()

Write Deflate

  1. from avro.datafile import DataFileWriter
  2. from avro.io import DatumWriter
  3.  
  4. #write binary
  5. file = open(filename, 'wb')
  6.  
  7. datum_writer = DatumWriter()
  8. fwriter = DataFileWriter(file, datum_writer, my_schema, codec = 'deflate')
  9. fwriter.append({'name_1': 645645, 'name_2': [5.6,34.7], 'name_3': 644.5645})
  10. fwriter.close()

Append

  1. from avro.datafile import DataFileWriter
  2. from avro.io import DatumWriter
  3. import io
  4.  
  5. #append binary
  6. file = open(filename, 'a+b')
  7.  
  8. datum_writer = DatumWriter()
  9. #Notice that the schema is not added the the datafilewriter. This is because you are appending to an existing avro file
  10. fwriter = DataFileWriter(file, datum_writer)
  11. fwriter.append({'name_1': 645675, 'name_2': [5.6,34.9], 'name_3': 649.5645})
  12. fwriter.close()

Read Schema

  1. from avro.datafile import DataFileReader
  2. from avro.io import DatumReader
  3.  
  4. file = open(filename, 'rb')
  5. datum_reader = DatumReader()
  6. file_reader = DataFileReader(file, datum_reader)
  7.  
  8. print(file_reader .meta)

Read

  1. from avro.datafile import DataFileReader
  2. from avro.io import DatumReader
  3.  
  4. #read binary
  5. fd = open(filename, 'rb')
  6. datum_reader = DatumReader()
  7. file_reader = DataFileReader(fd, datum_reader)
  8.  
  9. for datum in file_reader:
  10. print(datum['name_1'])
  11. print(datum['name_2'])
  12. print(datum['name_3'])
  13. file_reader.close()

Method 2

Write/Append BinaryEncoder

  1. import io
  2. from avro.io import DatumWriter, BinaryEncoder
  3.  
  4. #write binary
  5. file = open(filename, 'wb')
  6. #append binary
  7. file = open(filename, 'a+b')
  8. bytes_writer = io.BytesIO()
  9. encoder = BinaryEncoder(bytes_writer)
  10. writer_binary = DatumWriter(my_schema)
  11. writer_binary.write({'name_1': 645645, 'name_2': [5.6,34.7], 'name_3': 644.5645}, encoder)
  12. file.write(bytes_writer.getvalue())

Read BinaryDecoder

  1. import io
  2. from avro.io import DatumReader, BinaryDecoder
  3.  
  4. file = open(filename, 'rb')
  5. bytes_reader = io.BytesIO(file.read())
  6. decoder = BinaryDecoder(bytes_reader)
  7. reader = DatumReader(my_schema)
  8.  
  9. while True:
  10. try:
  11. rec = reader.read(decoder)
  12. print(rec['name_1'])
  13. print(rec['name_2'])
  14. print(rec['name_3'])
  15. except:
  16. break