Revert "Move avoid using copy-based buffer creation (#6039)"

This reverts commit 920a94470db04722c74b599a227f930946d0da80.
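
The reverted change had moved test and example call sites away from
copy-based constructors such as `Buffer::from(&slice)` toward
ownership-taking ones such as `Buffer::from_vec`; this revert restores
the earlier call sites. A minimal sketch of the distinction, assuming
the `arrow_buffer` crate's `Buffer::from_vec` constructor and its
`From` impl for byte slices (both of which appear in the diff below):

    use arrow_buffer::Buffer;

    fn main() {
        // Copy-based: the borrowed bytes are copied into a new allocation.
        let copied = Buffer::from(&[1u8, 2, 3, 4][..]);

        // Ownership-based: the Vec's existing allocation is reused without copying.
        let owned = Buffer::from_vec(vec![1u8, 2, 3, 4]);

        assert_eq!(copied.as_slice(), owned.as_slice());
    }
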
diff --git a/arrow-array/src/array/dictionary_array.rs b/arrow-array/src/array/dictionary_array.rs
index d6c5dd4..045917a 100644
--- a/arrow-array/src/array/dictionary_array.rs
+++ b/arrow-array/src/array/dictionary_array.rs
@@ -1025,13 +1025,13 @@
         let value_data = ArrayData::builder(DataType::Int8)
             .len(8)
             .add_buffer(Buffer::from(
-                [10_i8, 11, 12, 13, 14, 15, 16, 17].to_byte_slice(),
+                &[10_i8, 11, 12, 13, 14, 15, 16, 17].to_byte_slice(),
             ))
             .build()
             .unwrap();
 
         // Construct a buffer for value offsets, for the nested array:
-        let keys = Buffer::from([2_i16, 3, 4].to_byte_slice());
+        let keys = Buffer::from(&[2_i16, 3, 4].to_byte_slice());
 
         // Construct a dictionary array from the above two
         let key_type = DataType::Int16;
diff --git a/arrow-array/src/array/fixed_size_list_array.rs b/arrow-array/src/array/fixed_size_list_array.rs
index 0d57d9a..6f3a769 100644
--- a/arrow-array/src/array/fixed_size_list_array.rs
+++ b/arrow-array/src/array/fixed_size_list_array.rs
@@ -674,7 +674,7 @@
         assert_eq!(err.to_string(), "Invalid argument error: Found unmasked nulls for non-nullable FixedSizeListArray field \"item\"");
 
         // Valid as nulls in child masked by parent
-        let nulls = NullBuffer::new(BooleanBuffer::new(Buffer::from([0b0000101]), 0, 3));
+        let nulls = NullBuffer::new(BooleanBuffer::new(vec![0b0000101].into(), 0, 3));
         FixedSizeListArray::new(field, 2, values.clone(), Some(nulls));
 
         let field = Arc::new(Field::new("item", DataType::Int64, true));
diff --git a/arrow-array/src/array/map_array.rs b/arrow-array/src/array/map_array.rs
index bddf202..ed22ba1 100644
--- a/arrow-array/src/array/map_array.rs
+++ b/arrow-array/src/array/map_array.rs
@@ -448,20 +448,20 @@
         // Construct key and values
         let keys_data = ArrayData::builder(DataType::Int32)
             .len(8)
-            .add_buffer(Buffer::from([0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice()))
+            .add_buffer(Buffer::from(&[0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice()))
             .build()
             .unwrap();
         let values_data = ArrayData::builder(DataType::UInt32)
             .len(8)
             .add_buffer(Buffer::from(
-                [0u32, 10, 20, 30, 40, 50, 60, 70].to_byte_slice(),
+                &[0u32, 10, 20, 30, 40, 50, 60, 70].to_byte_slice(),
             ))
             .build()
             .unwrap();
 
         // Construct a buffer for value offsets, for the nested array:
         //  [[0, 1, 2], [3, 4, 5], [6, 7]]
-        let entry_offsets = Buffer::from([0, 3, 6, 8].to_byte_slice());
+        let entry_offsets = Buffer::from(&[0, 3, 6, 8].to_byte_slice());
 
         let keys = Arc::new(Field::new("keys", DataType::Int32, false));
         let values = Arc::new(Field::new("values", DataType::UInt32, false));
@@ -493,13 +493,13 @@
         // Construct key and values
         let key_data = ArrayData::builder(DataType::Int32)
             .len(8)
-            .add_buffer(Buffer::from([0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice()))
+            .add_buffer(Buffer::from(&[0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice()))
             .build()
             .unwrap();
         let value_data = ArrayData::builder(DataType::UInt32)
             .len(8)
             .add_buffer(Buffer::from(
-                [0u32, 10, 20, 0, 40, 0, 60, 70].to_byte_slice(),
+                &[0u32, 10, 20, 0, 40, 0, 60, 70].to_byte_slice(),
             ))
             .null_bit_buffer(Some(Buffer::from(&[0b11010110])))
             .build()
@@ -507,7 +507,7 @@
 
         // Construct a buffer for value offsets, for the nested array:
         //  [[0, 1, 2], [3, 4, 5], [6, 7]]
-        let entry_offsets = Buffer::from([0, 3, 6, 8].to_byte_slice());
+        let entry_offsets = Buffer::from(&[0, 3, 6, 8].to_byte_slice());
 
         let keys_field = Arc::new(Field::new("keys", DataType::Int32, false));
         let values_field = Arc::new(Field::new("values", DataType::UInt32, true));
@@ -617,18 +617,18 @@
         // Construct key and values
         let keys_data = ArrayData::builder(DataType::Int32)
             .len(5)
-            .add_buffer(Buffer::from([3, 4, 5, 6, 7].to_byte_slice()))
+            .add_buffer(Buffer::from(&[3, 4, 5, 6, 7].to_byte_slice()))
             .build()
             .unwrap();
         let values_data = ArrayData::builder(DataType::UInt32)
             .len(5)
-            .add_buffer(Buffer::from([30u32, 40, 50, 60, 70].to_byte_slice()))
+            .add_buffer(Buffer::from(&[30u32, 40, 50, 60, 70].to_byte_slice()))
             .build()
             .unwrap();
 
         // Construct a buffer for value offsets, for the nested array:
         //  [[3, 4, 5], [6, 7]]
-        let entry_offsets = Buffer::from([0, 3, 5].to_byte_slice());
+        let entry_offsets = Buffer::from(&[0, 3, 5].to_byte_slice());
 
         let keys = Arc::new(Field::new("keys", DataType::Int32, false));
         let values = Arc::new(Field::new("values", DataType::UInt32, false));
diff --git a/arrow-array/src/array/struct_array.rs b/arrow-array/src/array/struct_array.rs
index 44a7f38..ae29294 100644
--- a/arrow-array/src/array/struct_array.rs
+++ b/arrow-array/src/array/struct_array.rs
@@ -549,7 +549,7 @@
         let expected_string_data = ArrayData::builder(DataType::Utf8)
             .len(4)
             .null_bit_buffer(Some(Buffer::from(&[9_u8])))
-            .add_buffer(Buffer::from([0, 3, 3, 3, 7].to_byte_slice()))
+            .add_buffer(Buffer::from(&[0, 3, 3, 3, 7].to_byte_slice()))
             .add_buffer(Buffer::from(b"joemark"))
             .build()
             .unwrap();
@@ -557,7 +557,7 @@
         let expected_int_data = ArrayData::builder(DataType::Int32)
             .len(4)
             .null_bit_buffer(Some(Buffer::from(&[11_u8])))
-            .add_buffer(Buffer::from([1, 2, 0, 4].to_byte_slice()))
+            .add_buffer(Buffer::from(&[1, 2, 0, 4].to_byte_slice()))
             .build()
             .unwrap();
 
diff --git a/arrow-buffer/src/buffer/immutable.rs b/arrow-buffer/src/buffer/immutable.rs
index 52e201c..2c74384 100644
--- a/arrow-buffer/src/buffer/immutable.rs
+++ b/arrow-buffer/src/buffer/immutable.rs
@@ -543,7 +543,7 @@
 
     #[test]
     fn test_access_concurrently() {
-        let buffer = Buffer::from([1, 2, 3, 4, 5]);
+        let buffer = Buffer::from(vec![1, 2, 3, 4, 5]);
         let buffer2 = buffer.clone();
         assert_eq!([1, 2, 3, 4, 5], buffer.as_slice());
 
diff --git a/arrow-buffer/src/util/bit_chunk_iterator.rs b/arrow-buffer/src/util/bit_chunk_iterator.rs
index 4404509..9e4fb82 100644
--- a/arrow-buffer/src/util/bit_chunk_iterator.rs
+++ b/arrow-buffer/src/util/bit_chunk_iterator.rs
@@ -456,7 +456,7 @@
         const ALLOC_SIZE: usize = 4 * 1024;
         let input = vec![0xFF_u8; ALLOC_SIZE];
 
-        let buffer: Buffer = Buffer::from_vec(input);
+        let buffer: Buffer = Buffer::from(input);
 
         let bitchunks = buffer.bit_chunks(57, ALLOC_SIZE * 8 - 57);
 
diff --git a/arrow-cast/src/base64.rs b/arrow-cast/src/base64.rs
index 534b218..50c7423 100644
--- a/arrow-cast/src/base64.rs
+++ b/arrow-cast/src/base64.rs
@@ -20,7 +20,7 @@
 //! [`StringArray`]: arrow_array::StringArray
 
 use arrow_array::{Array, GenericBinaryArray, GenericStringArray, OffsetSizeTrait};
-use arrow_buffer::{Buffer, OffsetBuffer};
+use arrow_buffer::OffsetBuffer;
 use arrow_schema::ArrowError;
 use base64::encoded_len;
 use base64::engine::Config;
@@ -50,9 +50,7 @@
     assert_eq!(offset, buffer_len);
 
     // Safety: Base64 is valid UTF-8
-    unsafe {
-        GenericStringArray::new_unchecked(offsets, Buffer::from_vec(buffer), array.nulls().cloned())
-    }
+    unsafe { GenericStringArray::new_unchecked(offsets, buffer.into(), array.nulls().cloned()) }
 }
 
 /// Base64 decode each element of `array` with the provided [`Engine`]
@@ -81,7 +79,7 @@
 
     Ok(GenericBinaryArray::new(
         offsets,
-        Buffer::from_vec(buffer),
+        buffer.into(),
         array.nulls().cloned(),
     ))
 }
diff --git a/arrow-data/src/data.rs b/arrow-data/src/data.rs
index d6d6b90..b6ff451 100644
--- a/arrow-data/src/data.rs
+++ b/arrow-data/src/data.rs
@@ -1959,7 +1959,7 @@
             .len(20)
             .offset(5)
             .add_buffer(b1)
-            .null_bit_buffer(Some(Buffer::from([
+            .null_bit_buffer(Some(Buffer::from(vec![
                 0b01011111, 0b10110101, 0b01100011, 0b00011110,
             ])))
             .build()
@@ -2164,7 +2164,7 @@
 
     #[test]
     fn test_count_nulls() {
-        let buffer = Buffer::from([0b00010110, 0b10011111]);
+        let buffer = Buffer::from(vec![0b00010110, 0b10011111]);
         let buffer = NullBuffer::new(BooleanBuffer::new(buffer, 0, 16));
         let count = count_nulls(Some(&buffer), 0, 16);
         assert_eq!(count, 7);
diff --git a/arrow-flight/src/utils.rs b/arrow-flight/src/utils.rs
index 37d7ff9..c1e2d61 100644
--- a/arrow-flight/src/utils.rs
+++ b/arrow-flight/src/utils.rs
@@ -94,7 +94,7 @@
         })
         .map(|batch| {
             reader::read_record_batch(
-                &Buffer::from(data.data_body.as_ref()),
+                &Buffer::from(&data.data_body),
                 batch,
                 schema,
                 dictionaries_by_id,
diff --git a/arrow-integration-test/src/lib.rs b/arrow-integration-test/src/lib.rs
index d1486fd..66fa9f3 100644
--- a/arrow-integration-test/src/lib.rs
+++ b/arrow-integration-test/src/lib.rs
@@ -696,7 +696,7 @@
             let list_data = ArrayData::builder(field.data_type().clone())
                 .len(json_col.count)
                 .offset(0)
-                .add_buffer(Buffer::from(offsets.to_byte_slice()))
+                .add_buffer(Buffer::from(&offsets.to_byte_slice()))
                 .add_child_data(child_array.into_data())
                 .null_bit_buffer(Some(null_buf))
                 .build()
@@ -720,7 +720,7 @@
             let list_data = ArrayData::builder(field.data_type().clone())
                 .len(json_col.count)
                 .offset(0)
-                .add_buffer(Buffer::from(offsets.to_byte_slice()))
+                .add_buffer(Buffer::from(&offsets.to_byte_slice()))
                 .add_child_data(child_array.into_data())
                 .null_bit_buffer(Some(null_buf))
                 .build()
@@ -839,7 +839,7 @@
                 .collect();
             let array_data = ArrayData::builder(field.data_type().clone())
                 .len(json_col.count)
-                .add_buffer(Buffer::from(offsets.to_byte_slice()))
+                .add_buffer(Buffer::from(&offsets.to_byte_slice()))
                 .add_child_data(child_array.into_data())
                 .null_bit_buffer(Some(null_buf))
                 .build()
diff --git a/arrow-integration-testing/src/flight_client_scenarios/integration_test.rs b/arrow-integration-testing/src/flight_client_scenarios/integration_test.rs
index 1a6c4e2..ec88ce3 100644
--- a/arrow-integration-testing/src/flight_client_scenarios/integration_test.rs
+++ b/arrow-integration-testing/src/flight_client_scenarios/integration_test.rs
@@ -262,7 +262,7 @@
 
     while message.header_type() == ipc::MessageHeader::DictionaryBatch {
         reader::read_dictionary(
-            &Buffer::from(data.data_body.as_ref()),
+            &Buffer::from(&data.data_body),
             message
                 .header_as_dictionary_batch()
                 .expect("Error parsing dictionary"),
diff --git a/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs b/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs
index 76eb9d8..a03c1cd 100644
--- a/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs
+++ b/arrow-integration-testing/src/flight_server_scenarios/integration_test.rs
@@ -364,7 +364,7 @@
 
                 let batch = record_batch_from_message(
                     message,
-                    &Buffer::from(data.data_body.as_ref()),
+                    &Buffer::from(data.data_body),
                     schema_ref.clone(),
                     &dictionaries_by_id,
                 )
@@ -375,7 +375,7 @@
             ipc::MessageHeader::DictionaryBatch => {
                 dictionary_from_message(
                     message,
-                    &Buffer::from(data.data_body.as_ref()),
+                    &Buffer::from(data.data_body),
                     schema_ref.clone(),
                     &mut dictionaries_by_id,
                 )
diff --git a/arrow-ipc/src/compression.rs b/arrow-ipc/src/compression.rs
index 47ea778..0d8b7b4 100644
--- a/arrow-ipc/src/compression.rs
+++ b/arrow-ipc/src/compression.rs
@@ -103,8 +103,8 @@
         } else if let Ok(decompressed_length) = usize::try_from(decompressed_length) {
             // decompress data using the codec
             let input_data = &input[(LENGTH_OF_PREFIX_DATA as usize)..];
-            let v = self.decompress(input_data, decompressed_length as _)?;
-            Buffer::from_vec(v)
+            self.decompress(input_data, decompressed_length as _)?
+                .into()
         } else {
             return Err(ArrowError::IpcError(format!(
                 "Invalid uncompressed length: {decompressed_length}"
diff --git a/arrow-json/src/reader/mod.rs b/arrow-json/src/reader/mod.rs
index 97d9c89..3e1c5d2 100644
--- a/arrow-json/src/reader/mod.rs
+++ b/arrow-json/src/reader/mod.rs
@@ -1850,7 +1850,7 @@
         let c = ArrayDataBuilder::new(c_field.data_type().clone())
             .len(7)
             .add_child_data(d.to_data())
-            .null_bit_buffer(Some(Buffer::from([0b00111011])))
+            .null_bit_buffer(Some(Buffer::from(vec![0b00111011])))
             .build()
             .unwrap();
         let b = BooleanArray::from(vec![
@@ -1866,14 +1866,14 @@
             .len(7)
             .add_child_data(b.to_data())
             .add_child_data(c.clone())
-            .null_bit_buffer(Some(Buffer::from([0b00111111])))
+            .null_bit_buffer(Some(Buffer::from(vec![0b00111111])))
             .build()
             .unwrap();
         let a_list = ArrayDataBuilder::new(a_field.data_type().clone())
             .len(6)
             .add_buffer(Buffer::from_slice_ref([0i32, 2, 3, 6, 6, 6, 7]))
             .add_child_data(a)
-            .null_bit_buffer(Some(Buffer::from([0b00110111])))
+            .null_bit_buffer(Some(Buffer::from(vec![0b00110111])))
             .build()
             .unwrap();
         let expected = make_array(a_list);
diff --git a/arrow-json/src/writer.rs b/arrow-json/src/writer.rs
index 86d2e88..ef4141d 100644
--- a/arrow-json/src/writer.rs
+++ b/arrow-json/src/writer.rs
@@ -927,12 +927,12 @@
 
         let a_values = StringArray::from(vec!["a", "a1", "b", "c", "d", "e"]);
         // list column rows: ["a", "a1"], ["b"], ["c"], ["d"], ["e"]
-        let a_value_offsets = Buffer::from([0, 2, 3, 4, 5, 6].to_byte_slice());
+        let a_value_offsets = Buffer::from(&[0, 2, 3, 4, 5, 6].to_byte_slice());
         let a_list_data = ArrayData::builder(field_c1.data_type().clone())
             .len(5)
             .add_buffer(a_value_offsets)
             .add_child_data(a_values.into_data())
-            .null_bit_buffer(Some(Buffer::from([0b00011111])))
+            .null_bit_buffer(Some(Buffer::from(vec![0b00011111])))
             .build()
             .unwrap();
         let a = ListArray::from(a_list_data);
@@ -976,17 +976,17 @@
         // list column rows: [[1, 2], [3]], [], [[4, 5, 6]]
         let a_values = Int32Array::from(vec![1, 2, 3, 4, 5, 6]);
 
-        let a_value_offsets = Buffer::from([0, 2, 3, 6].to_byte_slice());
+        let a_value_offsets = Buffer::from(&[0, 2, 3, 6].to_byte_slice());
         // Construct a list array from the above two
         let a_list_data = ArrayData::builder(list_inner_type.data_type().clone())
             .len(3)
             .add_buffer(a_value_offsets)
-            .null_bit_buffer(Some(Buffer::from([0b00000111])))
+            .null_bit_buffer(Some(Buffer::from(vec![0b00000111])))
             .add_child_data(a_values.into_data())
             .build()
             .unwrap();
 
-        let c1_value_offsets = Buffer::from([0, 2, 2, 3].to_byte_slice());
+        let c1_value_offsets = Buffer::from(&[0, 2, 2, 3].to_byte_slice());
         let c1_list_data = ArrayData::builder(field_c1.data_type().clone())
             .len(3)
             .add_buffer(c1_value_offsets)
@@ -1058,12 +1058,12 @@
         // [{"c11": 1, "c12": {"c121": "e"}}, {"c12": {"c121": "f"}}],
         // null,
         // [{"c11": 5, "c12": {"c121": "g"}}]
-        let c1_value_offsets = Buffer::from([0, 2, 2, 3].to_byte_slice());
+        let c1_value_offsets = Buffer::from(&[0, 2, 2, 3].to_byte_slice());
         let c1_list_data = ArrayData::builder(field_c1.data_type().clone())
             .len(3)
             .add_buffer(c1_value_offsets)
             .add_child_data(struct_values.into_data())
-            .null_bit_buffer(Some(Buffer::from([0b00000101])))
+            .null_bit_buffer(Some(Buffer::from(vec![0b00000101])))
             .build()
             .unwrap();
         let c1 = ListArray::from(c1_list_data);
@@ -1225,7 +1225,7 @@
         );
 
         // [{"foo": 10}, null, {}, {"bar": 20, "baz": 30, "qux": 40}, {"quux": 50}, {}]
-        let entry_offsets = Buffer::from([0, 1, 1, 1, 4, 5, 5].to_byte_slice());
+        let entry_offsets = Buffer::from(&[0, 1, 1, 1, 4, 5, 5].to_byte_slice());
         let valid_buffer = Buffer::from([0b00111101]);
 
         let map_data = ArrayData::builder(map_data_type.clone())
@@ -1408,7 +1408,7 @@
             );
 
             // [{"list":[{"int32":1,"utf8":"a"},{"int32":null,"utf8":"b"}]},{"list":null},{"list":[{int32":5,"utf8":null}]},{"list":null}]
-            let entry_offsets = Buffer::from([0, 2, 2, 3, 3].to_byte_slice());
+            let entry_offsets = Buffer::from(&[0, 2, 2, 3, 3].to_byte_slice());
             let data = ArrayData::builder(field.data_type().clone())
                 .len(4)
                 .add_buffer(entry_offsets)
diff --git a/arrow-select/src/dictionary.rs b/arrow-select/src/dictionary.rs
index 2a53260..d0b6fcf 100644
--- a/arrow-select/src/dictionary.rs
+++ b/arrow-select/src/dictionary.rs
@@ -297,7 +297,7 @@
 
     #[test]
     fn test_merge_nulls() {
-        let buffer = Buffer::from(b"helloworldbingohelloworld");
+        let buffer = Buffer::from("helloworldbingohelloworld");
         let offsets = OffsetBuffer::from_lengths([5, 5, 5, 5, 5]);
         let nulls = NullBuffer::from(vec![true, false, true, true, true]);
         let values = StringArray::new(offsets, buffer, Some(nulls));
diff --git a/arrow-string/src/substring.rs b/arrow-string/src/substring.rs
index fc2f6c8..f5fe811 100644
--- a/arrow-string/src/substring.rs
+++ b/arrow-string/src/substring.rs
@@ -732,7 +732,7 @@
     }
 
     fn generic_string_with_non_zero_offset<O: OffsetSizeTrait>() {
-        let values = b"hellotherearrow";
+        let values = "hellotherearrow";
         let offsets = &[
             O::zero(),
             O::from_usize(5).unwrap(),
@@ -867,7 +867,7 @@
         let data = ArrayData::builder(GenericStringArray::<O>::DATA_TYPE)
             .len(2)
             .add_buffer(Buffer::from_slice_ref(offsets))
-            .add_buffer(Buffer::from(values.as_bytes()))
+            .add_buffer(Buffer::from(values))
             .null_bit_buffer(Some(Buffer::from(bitmap)))
             .offset(1)
             .build()
diff --git a/arrow/examples/builders.rs b/arrow/examples/builders.rs
index 5c8cd51..ad6b879 100644
--- a/arrow/examples/builders.rs
+++ b/arrow/examples/builders.rs
@@ -88,13 +88,13 @@
     // buffer.
     let value_data = ArrayData::builder(DataType::Int32)
         .len(8)
-        .add_buffer(Buffer::from([0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice()))
+        .add_buffer(Buffer::from(&[0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice()))
         .build()
         .unwrap();
 
     // Construct a buffer for value offsets, for the nested array:
     //  [[0, 1, 2], [3, 4, 5], [6, 7]]
-    let value_offsets = Buffer::from([0, 3, 6, 8].to_byte_slice());
+    let value_offsets = Buffer::from(&[0, 3, 6, 8].to_byte_slice());
 
     // Construct a list array from the above two
     let list_data_type = DataType::List(Arc::new(Field::new("item", DataType::Int32, false)));
diff --git a/arrow/examples/tensor_builder.rs b/arrow/examples/tensor_builder.rs
index 4544053..90ad1b4 100644
--- a/arrow/examples/tensor_builder.rs
+++ b/arrow/examples/tensor_builder.rs
@@ -57,7 +57,7 @@
 
     // In order to build a tensor from an array the function to_byte_slice add the
     // required padding to the elements in the array.
-    let buf = Buffer::from([0, 1, 2, 3, 4, 5, 6, 7, 9, 10].to_byte_slice());
+    let buf = Buffer::from(&[0, 1, 2, 3, 4, 5, 6, 7, 9, 10].to_byte_slice());
     let tensor = Int32Tensor::try_new(buf, Some(vec![2, 5]), None, None)?;
     println!("\nInt32 Tensor");
     println!("{tensor:?}");
diff --git a/arrow/tests/array_equal.rs b/arrow/tests/array_equal.rs
index 7ed4dae..15011c5 100644
--- a/arrow/tests/array_equal.rs
+++ b/arrow/tests/array_equal.rs
@@ -445,7 +445,7 @@
     .len(0)
     .add_buffer(Buffer::from([0i32, 2, 3, 4, 6, 7, 8].to_byte_slice()))
     .add_child_data(Int32Array::from(vec![1, 2, -1, -2, 3, 4, -3, -4]).into_data())
-    .null_bit_buffer(Some(Buffer::from([0b00001001])))
+    .null_bit_buffer(Some(Buffer::from(vec![0b00001001])))
     .build()
     .unwrap()
     .into();
@@ -483,7 +483,7 @@
     .len(6)
     .add_buffer(Buffer::from([0i32, 2, 3, 4, 6, 7, 8].to_byte_slice()))
     .add_child_data(c_values.into_data())
-    .null_bit_buffer(Some(Buffer::from([0b00001001])))
+    .null_bit_buffer(Some(Buffer::from(vec![0b00001001])))
     .build()
     .unwrap()
     .into();
@@ -506,7 +506,7 @@
     .len(6)
     .add_buffer(Buffer::from([0i32, 2, 3, 4, 6, 7, 8].to_byte_slice()))
     .add_child_data(d_values.into_data())
-    .null_bit_buffer(Some(Buffer::from([0b00001001])))
+    .null_bit_buffer(Some(Buffer::from(vec![0b00001001])))
     .build()
     .unwrap()
     .into();
@@ -807,7 +807,7 @@
         Field::new("f1", DataType::Utf8, true),
         Field::new("f2", DataType::Int32, true),
     ])))
-    .null_bit_buffer(Some(Buffer::from([0b00001011])))
+    .null_bit_buffer(Some(Buffer::from(vec![0b00001011])))
     .len(5)
     .add_child_data(strings.to_data())
     .add_child_data(ints.to_data())
@@ -819,7 +819,7 @@
         Field::new("f1", DataType::Utf8, true),
         Field::new("f2", DataType::Int32, true),
     ])))
-    .null_bit_buffer(Some(Buffer::from([0b00001011])))
+    .null_bit_buffer(Some(Buffer::from(vec![0b00001011])))
     .len(5)
     .add_child_data(strings.to_data())
     .add_child_data(ints_non_null.to_data())
@@ -835,7 +835,7 @@
         Field::new("f1", DataType::Utf8, true),
         Field::new("f2", DataType::Int32, true),
     ])))
-    .null_bit_buffer(Some(Buffer::from([0b00001011])))
+    .null_bit_buffer(Some(Buffer::from(vec![0b00001011])))
     .len(5)
     .add_child_data(strings.to_data())
     .add_child_data(c_ints_non_null.to_data())
@@ -849,7 +849,7 @@
     let a = ArrayData::builder(DataType::Struct(
         vec![Field::new("f3", a.data_type().clone(), true)].into(),
     ))
-    .null_bit_buffer(Some(Buffer::from([0b00011110])))
+    .null_bit_buffer(Some(Buffer::from(vec![0b00011110])))
     .len(5)
     .add_child_data(a.to_data())
     .build()
@@ -868,7 +868,7 @@
         Field::new("f1", DataType::Utf8, true),
         Field::new("f2", DataType::Int32, true),
     ])))
-    .null_bit_buffer(Some(Buffer::from([0b00001011])))
+    .null_bit_buffer(Some(Buffer::from(vec![0b00001011])))
     .len(5)
     .add_child_data(strings.to_data())
     .add_child_data(ints_non_null.to_data())
@@ -878,7 +878,7 @@
     let b = ArrayData::builder(DataType::Struct(
         vec![Field::new("f3", b.data_type().clone(), true)].into(),
     ))
-    .null_bit_buffer(Some(Buffer::from([0b00011110])))
+    .null_bit_buffer(Some(Buffer::from(vec![0b00011110])))
     .len(5)
     .add_child_data(b)
     .build()
@@ -909,7 +909,7 @@
     let a = ArrayData::builder(DataType::Struct(
         vec![Field::new("f1", DataType::Utf8, true)].into(),
     ))
-    .null_bit_buffer(Some(Buffer::from([0b00001010])))
+    .null_bit_buffer(Some(Buffer::from(vec![0b00001010])))
     .len(5)
     .add_child_data(strings1.to_data())
     .build()
@@ -919,7 +919,7 @@
     let b = ArrayData::builder(DataType::Struct(
         vec![Field::new("f1", DataType::Utf8, true)].into(),
     ))
-    .null_bit_buffer(Some(Buffer::from([0b00001010])))
+    .null_bit_buffer(Some(Buffer::from(vec![0b00001010])))
     .len(5)
     .add_child_data(strings2.to_data())
     .build()
@@ -939,7 +939,7 @@
     let c = ArrayData::builder(DataType::Struct(
         vec![Field::new("f1", DataType::Utf8, true)].into(),
     ))
-    .null_bit_buffer(Some(Buffer::from([0b00001011])))
+    .null_bit_buffer(Some(Buffer::from(vec![0b00001011])))
     .len(5)
     .add_child_data(strings3.to_data())
     .build()
diff --git a/arrow/tests/array_validation.rs b/arrow/tests/array_validation.rs
index 1321f10..41def90 100644
--- a/arrow/tests/array_validation.rs
+++ b/arrow/tests/array_validation.rs
@@ -63,7 +63,7 @@
 #[should_panic(expected = "null_bit_buffer size too small. got 1 needed 2")]
 fn test_bitmap_too_small() {
     let buffer = make_i32_buffer(9);
-    let null_bit_buffer = Buffer::from([0b11111111]);
+    let null_bit_buffer = Buffer::from(vec![0b11111111]);
 
     ArrayData::try_new(
         DataType::Int32,
diff --git a/parquet/src/arrow/array_reader/byte_view_array.rs b/parquet/src/arrow/array_reader/byte_view_array.rs
index 5845e2c..d1a0313 100644
--- a/parquet/src/arrow/array_reader/byte_view_array.rs
+++ b/parquet/src/arrow/array_reader/byte_view_array.rs
@@ -28,7 +28,6 @@
 use crate::errors::{ParquetError, Result};
 use crate::schema::types::ColumnDescPtr;
 use arrow_array::{builder::make_view, ArrayRef};
-use arrow_buffer::Buffer;
 use arrow_data::ByteView;
 use arrow_schema::DataType as ArrowType;
 use bytes::Bytes;
@@ -667,7 +666,7 @@
             v
         };
 
-        let actual_block_id = output.append_block(Buffer::from_vec(array_buffer));
+        let actual_block_id = output.append_block(array_buffer.into());
         assert_eq!(actual_block_id, buffer_id);
         Ok(read)
     }
diff --git a/parquet/src/arrow/array_reader/list_array.rs b/parquet/src/arrow/array_reader/list_array.rs
index e1752f3..7c66c5c 100644
--- a/parquet/src/arrow/array_reader/list_array.rs
+++ b/parquet/src/arrow/array_reader/list_array.rs
@@ -213,7 +213,7 @@
             return Err(general_err!("Failed to reconstruct list from level data"));
         }
 
-        let value_offsets = Buffer::from(list_offsets.to_byte_slice());
+        let value_offsets = Buffer::from(&list_offsets.to_byte_slice());
 
         let mut data_builder = ArrayData::builder(self.get_data_type().clone())
             .len(list_offsets.len() - 1)
diff --git a/parquet/src/arrow/arrow_writer/levels.rs b/parquet/src/arrow/arrow_writer/levels.rs
index 3e828bb..c50e612 100644
--- a/parquet/src/arrow/arrow_writer/levels.rs
+++ b/parquet/src/arrow/arrow_writer/levels.rs
@@ -1050,7 +1050,7 @@
         let a_list_data = ArrayData::builder(a_list_type.clone())
             .len(5)
             .add_buffer(a_value_offsets)
-            .null_bit_buffer(Some(Buffer::from([0b00011011])))
+            .null_bit_buffer(Some(Buffer::from(vec![0b00011011])))
             .add_child_data(a_values.to_data())
             .build()
             .unwrap();
@@ -1116,7 +1116,7 @@
 
         // Construct a buffer for value offsets, for the nested array:
         //  [[1], [2, 3], null, [4, 5, 6], [7, 8, 9, 10]]
-        let g_value_offsets = arrow::buffer::Buffer::from([0, 1, 3, 3, 6, 10].to_byte_slice());
+        let g_value_offsets = arrow::buffer::Buffer::from(&[0, 1, 3, 3, 6, 10].to_byte_slice());
 
         // Construct a list array from the above two
         let g_list_data = ArrayData::builder(struct_field_g.data_type().clone())
diff --git a/parquet/src/arrow/arrow_writer/mod.rs b/parquet/src/arrow/arrow_writer/mod.rs
index cf46f3b..f83e56c 100644
--- a/parquet/src/arrow/arrow_writer/mod.rs
+++ b/parquet/src/arrow/arrow_writer/mod.rs
@@ -1206,7 +1206,7 @@
 
         // Construct a buffer for value offsets, for the nested array:
         //  [[1], [2, 3], null, [4, 5, 6], [7, 8, 9, 10]]
-        let a_value_offsets = arrow::buffer::Buffer::from([0, 1, 3, 3, 6, 10].to_byte_slice());
+        let a_value_offsets = arrow::buffer::Buffer::from(&[0, 1, 3, 3, 6, 10].to_byte_slice());
 
         // Construct a list array from the above two
         let a_list_data = ArrayData::builder(DataType::List(Arc::new(Field::new(
@@ -1217,7 +1217,7 @@
         .len(5)
         .add_buffer(a_value_offsets)
         .add_child_data(a_values.into_data())
-        .null_bit_buffer(Some(Buffer::from([0b00011011])))
+        .null_bit_buffer(Some(Buffer::from(vec![0b00011011])))
         .build()
         .unwrap();
         let a = ListArray::from(a_list_data);
@@ -1246,7 +1246,7 @@
 
         // Construct a buffer for value offsets, for the nested array:
         //  [[1], [2, 3], [], [4, 5, 6], [7, 8, 9, 10]]
-        let a_value_offsets = arrow::buffer::Buffer::from([0, 1, 3, 3, 6, 10].to_byte_slice());
+        let a_value_offsets = arrow::buffer::Buffer::from(&[0, 1, 3, 3, 6, 10].to_byte_slice());
 
         // Construct a list array from the above two
         let a_list_data = ArrayData::builder(DataType::List(Arc::new(Field::new(
@@ -1405,7 +1405,7 @@
 
         // Construct a buffer for value offsets, for the nested array:
         //  [[1], [2, 3], [], [4, 5, 6], [7, 8, 9, 10]]
-        let g_value_offsets = arrow::buffer::Buffer::from([0, 1, 3, 3, 6, 10].to_byte_slice());
+        let g_value_offsets = arrow::buffer::Buffer::from(&[0, 1, 3, 3, 6, 10].to_byte_slice());
 
         // Construct a list array from the above two
         let g_list_data = ArrayData::builder(struct_field_g.data_type().clone())
@@ -1420,7 +1420,7 @@
             .len(5)
             .add_buffer(g_value_offsets)
             .add_child_data(g_value.to_data())
-            .null_bit_buffer(Some(Buffer::from([0b00011011])))
+            .null_bit_buffer(Some(Buffer::from(vec![0b00011011])))
             .build()
             .unwrap();
         let h = ListArray::from(h_list_data);
@@ -1525,14 +1525,14 @@
         let c = Int32Array::from(vec![Some(1), None, Some(3), None, None, Some(6)]);
         let b_data = ArrayDataBuilder::new(field_b.data_type().clone())
             .len(6)
-            .null_bit_buffer(Some(Buffer::from([0b00100111])))
+            .null_bit_buffer(Some(Buffer::from(vec![0b00100111])))
             .add_child_data(c.into_data())
             .build()
             .unwrap();
         let b = StructArray::from(b_data);
         let a_data = ArrayDataBuilder::new(field_a.data_type().clone())
             .len(6)
-            .null_bit_buffer(Some(Buffer::from([0b00101111])))
+            .null_bit_buffer(Some(Buffer::from(vec![0b00101111])))
             .add_child_data(b.into_data())
             .build()
             .unwrap();
@@ -1595,7 +1595,7 @@
         let c = Int32Array::from(vec![1, 2, 3, 4, 5, 6]);
         let b_data = ArrayDataBuilder::new(type_b)
             .len(6)
-            .null_bit_buffer(Some(Buffer::from([0b00100111])))
+            .null_bit_buffer(Some(Buffer::from(vec![0b00100111])))
             .add_child_data(c.into_data())
             .build()
             .unwrap();
@@ -2280,7 +2280,7 @@
 
         // Build [[], null, [null, null]]
         let a_values = NullArray::new(2);
-        let a_value_offsets = arrow::buffer::Buffer::from([0, 0, 0, 2].to_byte_slice());
+        let a_value_offsets = arrow::buffer::Buffer::from(&[0, 0, 0, 2].to_byte_slice());
         let a_list_data = ArrayData::builder(DataType::List(Arc::new(Field::new(
             "item",
             DataType::Null,
@@ -2288,7 +2288,7 @@
         ))))
         .len(3)
         .add_buffer(a_value_offsets)
-        .null_bit_buffer(Some(Buffer::from([0b00000101])))
+        .null_bit_buffer(Some(Buffer::from(vec![0b00000101])))
         .add_child_data(a_values.into_data())
         .build()
         .unwrap();
@@ -2310,7 +2310,7 @@
     #[test]
     fn list_single_column() {
         let a_values = Int32Array::from(vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
-        let a_value_offsets = arrow::buffer::Buffer::from([0, 1, 3, 3, 6, 10].to_byte_slice());
+        let a_value_offsets = arrow::buffer::Buffer::from(&[0, 1, 3, 3, 6, 10].to_byte_slice());
         let a_list_data = ArrayData::builder(DataType::List(Arc::new(Field::new(
             "item",
             DataType::Int32,
@@ -2318,7 +2318,7 @@
         ))))
         .len(5)
         .add_buffer(a_value_offsets)
-        .null_bit_buffer(Some(Buffer::from([0b00011011])))
+        .null_bit_buffer(Some(Buffer::from(vec![0b00011011])))
         .add_child_data(a_values.into_data())
         .build()
         .unwrap();
@@ -2334,7 +2334,7 @@
     #[test]
     fn large_list_single_column() {
         let a_values = Int32Array::from(vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
-        let a_value_offsets = arrow::buffer::Buffer::from([0i64, 1, 3, 3, 6, 10].to_byte_slice());
+        let a_value_offsets = arrow::buffer::Buffer::from(&[0i64, 1, 3, 3, 6, 10].to_byte_slice());
         let a_list_data = ArrayData::builder(DataType::LargeList(Arc::new(Field::new(
             "large_item",
             DataType::Int32,
@@ -2343,7 +2343,7 @@
         .len(5)
         .add_buffer(a_value_offsets)
         .add_child_data(a_values.into_data())
-        .null_bit_buffer(Some(Buffer::from([0b00011011])))
+        .null_bit_buffer(Some(Buffer::from(vec![0b00011011])))
         .build()
         .unwrap();