# Query by device alignment

The table structure of an AlignByDevice query is:

| time | device | sensor1 | sensor2 | sensor3 | ... |
| ---- | ------ | ------- | ------- | ------- | --- |
|      |        |         |         |         |     |

## Design principle

The align-by-device query is implemented by first calculating the measurement columns and filter conditions corresponding to each device in the query, then executing the query separately for each device, and finally assembling and returning the result sets.

### Meaning of important fields in AlignByDevicePlan

First, the meaning of some important fields in AlignByDevicePlan:

- `List<String> measurements`: the list of measurements that appear in the query.
- `List<String> devices`: the list of devices obtained from the prefix paths.
- `Map<String, IExpression> deviceToFilterMap`: stores the filter condition corresponding to each device.
- `Map<String, TSDataType> measurementDataTypeMap`: records the actual data type of each time series for the actual query; its keys do not contain aggregate functions.
- `Map<String, TSDataType> columnDataTypeMap`: records the data type of each column in the result set; it is used to construct the header and output the result set, and its keys may contain aggregate functions.
- `enum MeasurementType`: records the three measurement types. Measurements that do not exist in any device are of type `NonExist`; measurements enclosed in single or double quotes are of type `Constant`; measurements that exist are of type `Exist`.
- `Map<String, MeasurementType> measurementTypeMap`: records the types of all measurements in the query.
- `groupByTimePlan`, `fillQueryPlan`, `aggregationPlan`: to avoid redundancy, these three execution plans are set as subclasses of RawDataQueryPlan and as variables in AlignByDevicePlan. If the query plan belongs to one of these three plans, the corresponding field is assigned and saved.
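As a reference, the fields above can be pictured roughly as in the following sketch. This is a minimal illustration based only on the list above, not the actual class definition in the IoTDB source; imports of the referenced IoTDB types are omitted and all other members are left out.

```java
// Hedged sketch of the fields described above; not the real AlignByDevicePlan source.
// Imports of the IoTDB types (IExpression, TSDataType, the sub-plan classes, ...) are omitted.
public class AlignByDevicePlan {

  // Measurement types distinguished during physical plan generation
  public enum MeasurementType { Exist, NonExist, Constant }

  private List<String> measurements;                       // measurements appearing in the query
  private List<String> devices;                            // devices derived from the prefix paths
  private Map<String, IExpression> deviceToFilterMap;      // per-device filter conditions
  private Map<String, TSDataType> measurementDataTypeMap;  // actual series types (no aggregations)
  private Map<String, TSDataType> columnDataTypeMap;       // result-set column types (may contain aggregations)
  private Map<String, MeasurementType> measurementTypeMap; // type of every measurement

  // Only one of these is set, depending on the subquery type
  private GroupByTimePlan groupByTimePlan;
  private FillQueryPlan fillQueryPlan;
  private AggregationPlan aggregationPlan;
}
```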
Before explaining the specific implementation process, a relatively complete example is given first; the following explanation refers to this example.

```sql
SELECT s1, '1', *, s2, s5 FROM root.sg.d1, root.sg.* WHERE time = 1 AND s1 < 25 ALIGN BY DEVICE
```

The time series in the system are:

- root.sg.d1.s1
- root.sg.d1.s2
- root.sg.d2.s1

The storage group `root.sg` contains the two devices d1 and d2, where d1 has the two sensors s1 and s2, d2 has only the sensor s1, and the sensors with the same name s1 have the same data type.

The following explains the process step by step:

### Logical plan generation

- org.apache.iotdb.db.qp.Planner

Unlike a raw data query, an align-by-device query does not concatenate the suffix paths in the SELECT clause with the prefix paths in the FROM clause at this stage; instead, when the physical plan is generated later, the measurement columns and filter conditions corresponding to each device are calculated.

Therefore, the work done at this stage for align-by-device queries only includes the optimization of the filter conditions in the WHERE clause.

The optimization of the filter conditions mainly includes three parts: removing negation, transforming to disjunctive normal form, and merging filter conditions on the same path. The corresponding optimizers are RemoveNotOptimizer, DnfFilterOptimizer and MergeSingleFilterOptimizer. This part of the logic is described in [Planner](/SystemDesign/QueryEngine/Planner.html).

### Physical plan generation

- org.apache.iotdb.db.qp.strategy.PhysicalGenerator

After the logical plan is generated, the `transformToPhysicalPlan()` method of the PhysicalGenerator class is called to convert the logical plan into a physical plan. For align-by-device queries, the main logic of this method is implemented in the `transformQuery()` method.

**The main work done at this stage is to generate the corresponding `AlignByDevicePlan` and fill in its variable information.**

It splices the suffix paths obtained from the SELECT clause with the prefix paths in the FROM clause to calculate the measurements of the query, including their measurement types and data types. The calculation process is as follows:

```java
// Traverse the suffix paths
for (int i = 0; i < suffixPaths.size(); i++) {
  Path suffixPath = suffixPaths.get(i);
  // Used to record the measurements corresponding to a suffix path.
  // See the following for an example
  Set<String> measurementSetOfGivenSuffix = new LinkedHashSet<>();
  // If it is a constant, record it and continue to the next suffix path
  if (suffixPath.startWith("'")) {
    ...
    continue;
  }

  // If not a constant, splice it with each device to get a complete path
  for (String device : devices) {
    Path fullPath = Path.addPrefixPath(suffixPath, device);
    try {
      // Wildcards have been removed from the device list, but suffix paths may still contain them
      // Get the actual time series paths by removing wildcards
      List<String> actualPaths = getMatchedTimeseries(fullPath.getFullPath());
      // If the path after splicing does not exist, it is temporarily recognized as `NonExist`
      // If the measurement exists in a later device, `NonExist` is overridden to `Exist`
      if (actualPaths.isEmpty() && originAggregations.isEmpty()) {
        ...
      }

      // Get the data types with and without aggregate functions (actual time series) respectively
      // The data types with aggregate functions `columnDataTypes` are used for:
      // 1. the data type consistency check 2. header calculation and result set output
      // The actual data types of the time series `measurementDataTypes` are used for
      // the actual query in the AlignByDeviceDataSet
      String aggregation =
          originAggregations != null && !originAggregations.isEmpty()
              ? originAggregations.get(i) : null;
      Pair<List<TSDataType>, List<TSDataType>> pair = getSeriesTypes(actualPaths,
          aggregation);
      List<TSDataType> columnDataTypes = pair.left;
      List<TSDataType> measurementDataTypes = pair.right;

      for (int pathIdx = 0; pathIdx < actualPaths.size(); pathIdx++) {
        Path path = new Path(actualPaths.get(pathIdx));
        // Check the data type consistency of sensors with the same name
        String measurementChecked;
        ...
        TSDataType columnDataType = columnDataTypes.get(pathIdx);
        // Check the data type if there is already a sensor with the same name
        if (columnDataTypeMap.containsKey(measurementChecked)) {
          // If the data types are inconsistent, an exception is thrown and the query ends
          if (!columnDataType.equals(columnDataTypeMap.get(measurementChecked))) {
            throw new QueryProcessException(...);
          }
        } else {
          // There is no such measurement yet, record it
          ...
        }

        // Reaching this step means that the measurement exists under the device and is correct.
        // First, update measurementSetOfGivenSuffix, which is distinct;
        // then, if this measurement was recognized as NonExist before, update it to Exist
        if (measurementSetOfGivenSuffix.add(measurementChecked)
            || measurementTypeMap.get(measurementChecked) != MeasurementType.Exist) {
          measurementTypeMap.put(measurementChecked, MeasurementType.Exist);
        }
        // update paths
        paths.add(path);
      }
    } catch (MetadataException e) {
      throw new LogicalOptimizeException(...);
    }
  }
  // update measurements
  // Note that within a suffix path loop, a SET is used to avoid duplicate measurements,
  // while a LIST is used outside the loop to ensure that the output contains all measurements entered by the user.
  // In the example, for suffix *, measurementSetOfGivenSuffix = {s1,s2};
  // for suffix s1, measurementSetOfGivenSuffix = {s1};
  // therefore the final measurements are [s1,s2,s1].
  measurements.addAll(measurementSetOfGivenSuffix);
}
```
punctuation"}},[t._v("<")]),s("span",{pre:!0,attrs:{class:"token class-name"}},[t._v("String")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v(" devices"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token class-name"}},[t._v("FilterOperator")]),t._v(" operator"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token class-name"}},[t._v("Input")]),t._v(":"),s("span",{pre:!0,attrs:{class:"token class-name"}},[t._v("Deduplicated")]),t._v(" devices list and un"),s("span",{pre:!0,attrs:{class:"token operator"}},[t._v("-")]),t._v("stitched "),s("span",{pre:!0,attrs:{class:"token class-name"}},[t._v("FilterOperator")]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token class-name"}},[t._v("Input")]),t._v(":"),s("span",{pre:!0,attrs:{class:"token class-name"}},[t._v("The")]),t._v(" deviceToFilterMap after splicing records the "),s("span",{pre:!0,attrs:{class:"token class-name"}},[t._v("Filter")]),t._v(" information corresponding "),s("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("to")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token namespace"}},[t._v("each")]),t._v(" device\n")])]),t._v(" "),s("div",{staticClass:"line-numbers-wrapper"},[s("span",{staticClass:"line-number"},[t._v("1")]),s("br"),s("span",{staticClass:"line-number"},[t._v("2")]),s("br"),s("span",{staticClass:"line-number"},[t._v("3")]),s("br"),s("span",{staticClass:"line-number"},[t._v("4")]),s("br")])]),s("p",[t._v("The "),s("code",[t._v("concatfilterbydevice()")]),t._v(" method splices the filter conditions according to the devices to get the corresponding filter conditions of each device. The main processing logic of it is in "),s("code",[t._v("concatFilterPath ()")]),t._v(":")]),t._v(" "),s("p",[t._v("The "),s("code",[t._v("concatFilterPath ()")]),t._v(" method traverses the unspliced FilterOperator binary tree to determine whether the node is a leaf node. If so, the path of the leaf node is taken. If the path starts with time or root, it is not processed, otherwise the device name and node are not processed. 
In the example, the result of splicing the filter conditions for device 1 is `time = 1 AND root.sg.d1.s1 < 25`, and for device 2 it is `time = 1 AND root.sg.d2.s1 < 25`.

The following summarizes the variable information calculated in this stage for the example:

- measurement list `measurements`: `[s1, '1', s1, s2, s2, s5]`
- measurement types `measurementTypeMap`:
  - `s1 -> Exist`
  - `s2 -> Exist`
  - `'1' -> Constant`
  - `s5 -> NonExist`
- filter condition `deviceToFilterMap` of each device:
  - `root.sg.d1 -> time = 1 AND root.sg.d1.s1 < 25`
  - `root.sg.d2 -> time = 1 AND root.sg.d2.s1 < 25`

### Constructing a Header (ColumnHeader)

- org.apache.iotdb.db.service.TSServiceImpl

After the physical plan is generated, the `executeQueryStatement()` method in TSServiceImpl can be executed to generate the result set and return it. The first step is to construct the header.

An align-by-device query calls the `TSServiceImpl.getQueryColumnHeaders()` method and then, according to the query type, enters `TSServiceImpl.getAlignByDeviceQueryHeaders()` to construct the header.

The `getAlignByDeviceQueryHeaders()` method is declared as follows:

```java
private void getAlignByDeviceQueryHeaders(
    AlignByDevicePlan plan, List<String> respColumns, List<String> columnTypes)
Input: the currently executing physical plan AlignByDevicePlan, and the column names respColumns and data types columnTypes that need to be output
Output: the calculated column names respColumns and data types columnTypes
```

The specific implementation logic is as follows:

1. First add the `Device` column, whose data type is `TEXT`;
2. Traverse the list of measurements without deduplication and determine the type of each measurement. If it is of type `Exist`, get its data type from `columnDataTypeMap`; the other two types are set to `TEXT`. Then add the measurement and its type to the header data structure.
3. Deduplicate the measurements based on the intermediate variable `deduplicatedMeasurements`.
The resulting header is:

| Time | Device | s1 | 1 | s1 | s2 | s2 | s5 |
| ---- | ------ | -- | - | -- | -- | -- | -- |
|      |        |    |   |    |    |    |    |

The deduplicated `measurements` are `[s1, '1', s2, s5]`.

### Result set generation

After the ColumnHeader is generated, the final step is to populate the result set with the results and return it.

#### Result set creation

- org.apache.iotdb.db.service.TSServiceImpl

At this stage, `TSServiceImpl.createQueryDataSet()` is called to create a new result set. This part of the implementation logic is relatively simple: for an AlignByDeviceQuery, only a new `AlignByDeviceDataSet` needs to be created, and in its constructor the parameters of the AlignByDevicePlan are assigned to the newly created result set.

#### Result set population

- org.apache.iotdb.db.utils.QueryDataSetUtils

Next, the results need to be filled in. AlignByDeviceQuery calls the `TSServiceImpl.fillRpcReturnData()` method and then, according to the query type, enters the `QueryDataSetUtils.convertQueryDataSetByFetchSize()` method.

The important method for obtaining results in `convertQueryDataSetByFetchSize()` is the `hasNext()` method of QueryDataSet.

The main logic of the `hasNext()` method is as follows:

1. Determine whether a row offset `rowOffset` is specified; if so, skip the number of rows that need to be offset; if the total number of results is less than the specified offset, return false.
2. Determine whether a row limit `rowLimit` is specified; if so, compare it with the current number of output rows, and return false if the current number of output rows is greater than the limit.
3. Enter the `AlignByDeviceDataSet.hasNextWithoutConstraint()` method.
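The offset/limit handling above can be sketched as follows. The field name `alreadyReturnedRowNum` is an assumption used for illustration; `rowOffset`, `rowLimit` and the `*WithoutConstraint()` methods are the ones named in this section, and the exact bookkeeping in IoTDB may differ.

```java
// Hedged sketch of QueryDataSet.hasNext() offset/limit handling; field names are assumed.
public boolean hasNext() throws IOException {
  // 1. Skip rowOffset rows first; if fewer rows exist than the offset, there is no next row
  while (rowOffset > 0) {
    if (hasNextWithoutConstraint()) {
      nextWithoutConstraint(); // consume and discard the skipped row
      rowOffset--;
    } else {
      return false;
    }
  }
  // 2. Stop once the number of returned rows reaches the rowLimit (if one is set)
  if (rowLimit > 0 && alreadyReturnedRowNum >= rowLimit) {
    return false;
  }
  // 3. Delegate to the subclass, e.g. AlignByDeviceDataSet.hasNextWithoutConstraint()
  return hasNextWithoutConstraint();
}
```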
- org.apache.iotdb.db.query.dataset.AlignByDeviceDataSet

First, the meaning of the important fields in the result set:

- `deviceIterator`: an align-by-device query essentially calculates the measurement columns and filter conditions corresponding to each device and then executes the query separately for each device. This field is the iterator over the devices; each round of execution obtains one device from it.
- `currentDataSet`: the result set obtained by the query of the current device.

The work done by the `hasNextWithoutConstraint()` method is mainly to determine whether the current result set has a next result. If it does not, the next device is obtained, the paths, data types and filter conditions required by this device to execute the query are calculated, and the corresponding query plan is executed according to the query type to obtain a result set, until no device remains to be queried.

The specific implementation logic is as follows:

1. First determine whether the current result set is initialized and has a next result. If so, return true directly, i.e. the `next()` method can be called to get the next `RowRecord`; otherwise the result set has not been initialized, so proceed to step 2.
2. Iterate `deviceIterator` to get the device for this round of execution, then find the device node in MManager through the device path and get all sensor nodes under it.
3. Compare all measurements in the query with the sensor nodes under the current device to get the `executeColumns` that need to be queried. Then concatenate the current device name with the measurements to calculate the query paths, data types and filter condition of the current device; the corresponding fields are `executePaths`, `tsDataTypes` and `expression`. If it is an aggregation query, `executeAggregations` also needs to be calculated.
4. Determine whether the current subquery type is GroupByQuery, AggregationQuery, FillQuery or RawDataQuery, execute the corresponding query and obtain the result set. The implementation logic of [Raw data query](/SystemDesign/DataQuery/RawDataQuery.html), [Aggregate query](/SystemDesign/DataQuery/AggregationQuery.html) and [Downsampling query](/SystemDesign/DataQuery/GroupByQuery.html) can be referenced.
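A condensed sketch of this loop is given below. The helpers `getDeviceMeasurements()` and `executeSubQueryForDevice()` are hypothetical stand-ins: the first for looking up the sensors under a device (step 2), the second for the path/type/filter computation and plan dispatch of steps 3 and 4.

```java
// Hedged sketch of AlignByDeviceDataSet.hasNextWithoutConstraint(); helper names are hypothetical.
protected boolean hasNextWithoutConstraint() throws IOException {
  // A result set built for a previous device may still have rows left
  if (currentDataSet != null && currentDataSet.hasNext()) {
    return true;
  }
  // Otherwise move to the next device until one of them produces a row
  while (deviceIterator.hasNext()) {
    String currentDevice = deviceIterator.next();

    // Step 2-3: intersect the queried measurements with the sensors under this device
    Set<String> sensorsOfDevice = getDeviceMeasurements(currentDevice);
    List<String> executeColumns = new ArrayList<>();
    for (String measurement : measurements) {
      if (sensorsOfDevice.contains(measurement)) {
        executeColumns.add(measurement);
      }
    }

    // Step 4: run the device-level subquery (raw data, aggregation, group-by or fill)
    currentDataSet = executeSubQueryForDevice(currentDevice, executeColumns);
    if (currentDataSet.hasNext()) {
      return true;
    }
  }
  return false;
}
```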
After the result set has been initialized by the `hasNextWithoutConstraint()` method and it is ensured that there is a next result, the `QueryDataSet.next()` method can be called to get the next `RowRecord`.

The `next()` method is mainly implemented by the `AlignByDeviceDataSet.nextWithoutConstraint()` method.

The work done by the `nextWithoutConstraint()` method is to **transform the time-aligned result set form obtained by the single-device query into the device-aligned result set form**, and return the transformed `RowRecord`.

The specific implementation logic is as follows:

1. First get the next time-aligned `originRowRecord` from the result set.
2. Create a new `RowRecord` with the timestamp and add the device column to it; then build a Map structure `currentColumnMap` from `measurementName -> Field` according to `executeColumns` and the obtained results.
3. After that, only the deduplicated `measurements` list needs to be traversed and its type determined. If the type is `Exist`, the corresponding result is obtained from `currentColumnMap` by the measurement name, and set to `null` if there is none; if the type is `NonExist`, the field is set to `null` directly; if the type is `Constant`, the measurement name itself is used as the value of this column.

After writing the output data stream according to the transformed `RowRecord`, the result set can be returned.
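To make the record transformation in the three steps above concrete, here is a minimal sketch of such a `nextWithoutConstraint()`. It assumes a `currentDevice` field holding the name of the device being queried, and the `Field`/`RowRecord`/`Binary` usage is simplified and may differ in detail from the actual IoTDB code.

```java
// Hedged sketch of AlignByDeviceDataSet.nextWithoutConstraint(); API details are simplified.
protected RowRecord nextWithoutConstraint() throws IOException {
  // 1. Next time-aligned record of the current device's result set
  RowRecord originRowRecord = currentDataSet.next();

  // 2. New record: timestamp, then the device column as a TEXT field
  RowRecord rowRecord = new RowRecord(originRowRecord.getTimestamp());
  Field deviceField = new Field(TSDataType.TEXT);
  deviceField.setBinaryV(new Binary(currentDevice)); // assumed currentDevice field
  rowRecord.addField(deviceField);

  // Map measurement name -> Field of the original record, in executeColumns order
  Map<String, Field> currentColumnMap = new HashMap<>();
  List<Field> originFields = originRowRecord.getFields();
  for (int i = 0; i < executeColumns.size(); i++) {
    currentColumnMap.put(executeColumns.get(i), originFields.get(i));
  }

  // 3. Emit one field per deduplicated measurement, according to its type
  for (String measurement : measurements) {
    switch (measurementTypeMap.get(measurement)) {
      case Exist:
        // null when this device has no value for the measurement
        rowRecord.addField(currentColumnMap.get(measurement));
        break;
      case NonExist:
        rowRecord.addField(null);
        break;
      case Constant:
        Field constant = new Field(TSDataType.TEXT);
        constant.setBinaryV(new Binary(measurement)); // the constant itself is the value
        rowRecord.addField(constant);
        break;
    }
  }
  return rowRecord;
}
```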