"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[683],{28453:(e,n,t)=>{t.d(n,{R:()=>a,x:()=>i});var o=t(96540);const r={},s=o.createContext(r);function a(e){const n=o.useContext(s);return o.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function i(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(r):e.components||r:a(e.components),o.createElement(s.Provider,{value:n},e.children)}},76618:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>d,contentTitle:()=>i,default:()=>u,frontMatter:()=>a,metadata:()=>o,toc:()=>c});const o=JSON.parse('{"id":"development/extensions-core/parquet","title":"Apache Parquet Extension","description":"\x3c!--","source":"@site/docs/32.0.0/development/extensions-core/parquet.md","sourceDirName":"development/extensions-core","slug":"/development/extensions-core/parquet","permalink":"/docs/32.0.0/development/extensions-core/parquet","draft":false,"unlisted":false,"tags":[],"version":"current","frontMatter":{"id":"parquet","title":"Apache Parquet Extension"}}');var r=t(74848),s=t(28453);const a={id:"parquet",title:"Apache Parquet Extension"},i=void 0,d={},c=[];function p(e){const n={a:"a",code:"code",p:"p",...(0,s.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsxs)(n.p,{children:["This Apache Druid module extends ",(0,r.jsx)(n.a,{href:"/docs/32.0.0/ingestion/hadoop",children:"Druid Hadoop based indexing"})," to ingest data directly from offline\nApache Parquet files."]}),"\n",(0,r.jsxs)(n.p,{children:["Note: If using the ",(0,r.jsx)(n.code,{children:"parquet-avro"})," parser for Apache Hadoop based indexing, ",(0,r.jsx)(n.code,{children:"druid-parquet-extensions"})," depends on the ",(0,r.jsx)(n.code,{children:"druid-avro-extensions"})," module, so be sure to\n",(0,r.jsx)(n.a,{href:"/docs/32.0.0/configuration/extensions#loading-extensions",children:"include both"}),"."]}),"\n",(0,r.jsxs)(n.p,{children:["The ",(0,r.jsx)(n.code,{children:"druid-parquet-extensions"})," provides the ",(0,r.jsx)(n.a,{href:"/docs/32.0.0/ingestion/data-formats#parquet",children:"Parquet input format"}),", the ",(0,r.jsx)(n.a,{href:"/docs/32.0.0/ingestion/data-formats#parquet-hadoop-parser",children:"Parquet Hadoop parser"}),",\nand the ",(0,r.jsx)(n.a,{href:"/docs/32.0.0/ingestion/data-formats#parquet-avro-hadoop-parser",children:"Parquet Avro Hadoop Parser"})," with ",(0,r.jsx)(n.code,{children:"druid-avro-extensions"}),".\nThe Parquet input format is available for ",(0,r.jsx)(n.a,{href:"/docs/32.0.0/ingestion/native-batch",children:"native batch ingestion"}),"\nand the other 2 parsers are for ",(0,r.jsx)(n.a,{href:"/docs/32.0.0/ingestion/hadoop",children:"Hadoop batch ingestion"}),".\nPlease see corresponding docs for details."]})]})}function u(e={}){const{wrapper:n}={...(0,s.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(p,{...e})}):p(e)}}}]);