// Copy one of the Avro data files generated by sqoop from HDFS to the local file system
hadoop fs -get /user/hive/warehouse/retail_stage.db/orders/part-m-00000.avro
// Extract the Avro schema from the data file into orders.avsc
avro-tools getschema part-m-00000.avro > orders.avsc
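// Optional: review the extracted schema locally to confirm the field names and
// types sqoop captured (the exact schema depends on your source table)
cat orders.avsc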
// Create HDFS directory /user/hive/schemas to keep the Avro schema files
// generated as part of the previous sqoop command
// Do not use /user/hive in actual projects; /user/hive is a special directory
// You will probably have to use directories specific to your application
// Use -p so the order subdirectory is created as well; the schema file must
// land at /user/hive/schemas/order/orders.avsc for the table definition below
hadoop fs -mkdir -p /user/hive/schemas/order
hadoop fs -copyFromLocal orders.avsc /user/hive/schemas/order
hadoop fs -ls /user/hive/schemas/order
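// Optional: cat the schema file back from HDFS to verify the upload
hadoop fs -cat /user/hive/schemas/order/orders.avsc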
// Launch Hive using the 'hive' command in a separate terminal
// The Hive statement below creates an external table pointing to the Avro data files for the orders data
// The table will be created in the default database in this case
create external table orders_sqoop
STORED AS AVRO
LOCATION '/user/hive/warehouse/retail_stage.db/orders'
TBLPROPERTIES ('avro.schema.url'='/user/hive/schemas/order/orders.avsc');
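// Sanity checks from the Hive prompt: confirm the table definition picked up
// the schema and that rows are readable (row contents depend on the imported data)
describe formatted orders_sqoop;
select * from orders_sqoop limit 10;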