Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
b127a21
feat(sql): show timeseries order by timeseries
xiangmy21 Nov 4, 2025
d4fd695
feat(Temp): show timeseries order by timeseries
xiangmy21 Nov 4, 2025
94e783d
feat: show timeseries order by timeseries (pushDown for single region)
xiangmy21 Nov 28, 2025
1fff1ba
fix: ShowStatement limit default value should be -1 not 0.
xiangmy21 Dec 26, 2025
66833ce
feat: (show timeseries order by timeseries) limit/offset pushdown to …
xiangmy21 Jan 16, 2026
7dba690
code-style: spotless apply
xiangmy21 Jan 22, 2026
e6c2981
fix: limit default value reset to 0 due to project convention.
xiangmy21 Jan 22, 2026
0177c66
fix: disable the offset-pruning pushdown when showTimeSeriesPlan.getS…
xiangmy21 Jan 27, 2026
4027f97
fix: update subtreeMeasurementCount for all affected paths when a tem…
xiangmy21 Jan 27, 2026
c56e155
fix: add test for where clause and template change. correct pushdown …
xiangmy21 Jan 27, 2026
6c9ab1b
fix: template in PBTree is not ordered by default. need sort.
xiangmy21 Feb 2, 2026
67412e7
feat: ensure compatibility with timeCondition during sorting
xiangmy21 Feb 2, 2026
0c5eeca
Revert "feat: ensure compatibility with timeCondition during sorting"
xiangmy21 Feb 2, 2026
ab3da03
fix: update test for Order with TimeCondition
xiangmy21 Feb 2, 2026
991dda9
fix: refine the pushdown logic
xiangmy21 Feb 2, 2026
9ce3e71
feat: pushdown for PBTree ASC situation.
xiangmy21 Feb 3, 2026
fd34cc9
refactor: schemaFilter also pushdown to MemTree
xiangmy21 Feb 3, 2026
dccb042
refactor: refine pushdown logic in LogicalPlanVisitor
xiangmy21 Feb 3, 2026
42e1528
refactor: avoid duplicate templateInfo parsing
xiangmy21 Feb 3, 2026
2121c9a
refactor: use one Ordering timeseriesOrdering instead of two boolean.
xiangmy21 Feb 3, 2026
d906f93
fix(pbtree): lazily init direct children iterator to avoid cache evic…
xiangmy21 Feb 3, 2026
3472b13
Merge remote-tracking branch 'origin/master' into feat_timeseries_order
xiangmy21 Feb 3, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,278 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.it.schema;

import org.apache.iotdb.commons.schema.column.ColumnHeaderConstant;
import org.apache.iotdb.it.env.EnvFactory;
import org.apache.iotdb.itbase.category.ClusterIT;
import org.apache.iotdb.itbase.category.LocalStandaloneIT;
import org.apache.iotdb.util.AbstractSchemaIT;

import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runners.Parameterized;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

@Category({LocalStandaloneIT.class, ClusterIT.class})
public class IoTDBShowTimeseriesOrderByTimeseriesIT extends AbstractSchemaIT {

  /** SQL template for creating a plain INT32 timeseries at the given full path. */
  private static final String CREATE_TIMESERIES_FORMAT =
      "create timeseries %s with datatype=INT32, encoding=RLE, compression=SNAPPY";

  private static final List<String> BASE_TIMESERIES_DB1 =
      Arrays.asList("root.db1.devA.m1", "root.db1.devB.m1", "root.db1.devA.m2", "root.db1.devB.x");
  private static final List<String> BASE_TIMESERIES_DB2 =
      Arrays.asList("root.db2.devA.m1", "root.db2.devC.m3", "root.db2.devC.m0");
  // Every base series across both databases (db1 followed by db2).
  private static final List<String> BASE_TIMESERIES =
      Stream.concat(BASE_TIMESERIES_DB1.stream(), BASE_TIMESERIES_DB2.stream())
          .collect(Collectors.toList());

  public IoTDBShowTimeseriesOrderByTimeseriesIT(SchemaTestMode schemaTestMode) {
    super(schemaTestMode);
  }

  @Parameterized.BeforeParam
  public static void before() throws Exception {
    setUpEnvironment();
    EnvFactory.getEnv().initClusterEnvironment();
  }

  @Parameterized.AfterParam
  public static void after() throws Exception {
    EnvFactory.getEnv().cleanClusterEnvironment();
    tearDownEnvironment();
  }

  @After
  public void tearDown() throws Exception {
    clearSchema();
  }

  /** Executes the given SQL statements in order on a single fresh connection. */
  private void executeSqls(String... sqls) throws Exception {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      for (String sql : sqls) {
        statement.execute(sql);
      }
    }
  }

  /** Creates root.db1 / root.db2 and every series listed in {@code BASE_TIMESERIES}. */
  private void prepareComplexSchema() throws Exception {
    List<String> sqls = new ArrayList<>();
    sqls.add("CREATE DATABASE root.db1");
    sqls.add("CREATE DATABASE root.db2");
    for (String path : BASE_TIMESERIES) {
      sqls.add(String.format(CREATE_TIMESERIES_FORMAT, path));
    }
    executeSqls(sqls.toArray(new String[0]));
  }

  /** Runs {@code sql} and returns the TIMESERIES column of every result row, in result order. */
  private List<String> readTimeseriesColumn(final String sql) throws Exception {
    final List<String> paths = new ArrayList<>();
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement();
        ResultSet resultSet = statement.executeQuery(sql)) {
      while (resultSet.next()) {
        paths.add(resultSet.getString(ColumnHeaderConstant.TIMESERIES));
      }
    }
    return paths;
  }

  @Test
  public void testOrderAscWithoutLimit() throws Exception {
    prepareComplexSchema();
    // Ascending lexicographic order over both databases.
    List<String> expected = BASE_TIMESERIES.stream().sorted().collect(Collectors.toList());

    assertEquals(
        expected, readTimeseriesColumn("show timeseries root.db*.** order by timeseries"));
  }

  @Test
  public void testOrderDescWithOffsetLimit() throws Exception {
    prepareComplexSchema();
    // Descending order over db1, then offset 1 / limit 2.
    List<String> expected =
        BASE_TIMESERIES_DB1.stream()
            .sorted(Collections.reverseOrder())
            .skip(1)
            .limit(2)
            .collect(Collectors.toList());

    assertEquals(
        expected,
        readTimeseriesColumn(
            "show timeseries root.db1.** order by timeseries desc offset 1 limit 2"));
  }

  @Test
  public void testInsertThenQueryOrder() throws Exception {
    prepareComplexSchema();
    executeSqls(String.format(CREATE_TIMESERIES_FORMAT, "root.db1.devX.a"));

    // The newly created series must appear in its sorted position.
    List<String> expected =
        Stream.concat(BASE_TIMESERIES_DB1.stream(), Stream.of("root.db1.devX.a"))
            .sorted()
            .collect(Collectors.toList());

    assertEquals(
        expected, readTimeseriesColumn("show timeseries root.db1.** order by timeseries"));
  }

  @Test
  public void testDeleteSubtreeThenQueryOrder() throws Exception {
    prepareComplexSchema();
    executeSqls("delete timeseries root.db2.devC.**");

    // Both series under root.db2.devC are gone; the rest stay sorted.
    List<String> expected =
        BASE_TIMESERIES_DB2.stream()
            .filter(path -> !path.equals("root.db2.devC.m0") && !path.equals("root.db2.devC.m3"))
            .sorted()
            .collect(Collectors.toList());

    assertEquals(
        expected, readTimeseriesColumn("show timeseries root.db2.** order by timeseries"));
  }

  @Test
  public void testOffsetLimitAfterDeletesAndAdds() throws Exception {
    prepareComplexSchema();
    executeSqls(
        "delete timeseries root.db1.devB.x",
        String.format(CREATE_TIMESERIES_FORMAT, "root.db1.devC.m0"),
        String.format(CREATE_TIMESERIES_FORMAT, "root.db1.devZ.z"));

    // db1 minus the deleted series, plus the two additions; then offset 2 / limit 2.
    List<String> expected =
        Stream.concat(
                BASE_TIMESERIES_DB1.stream().filter(path -> !path.equals("root.db1.devB.x")),
                Stream.of("root.db1.devC.m0", "root.db1.devZ.z"))
            .sorted()
            .skip(2)
            .limit(2)
            .collect(Collectors.toList());

    assertEquals(
        expected,
        readTimeseriesColumn("show timeseries root.db1.** order by timeseries offset 2 limit 2"));
  }

  @Test
  public void testConflictWithLatest() throws Exception {
    prepareComplexSchema();
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      // LATEST and ORDER BY TIMESERIES must not be combined; expect a SQLException.
      try (ResultSet ignored =
          statement.executeQuery("show latest timeseries order by timeseries")) {
        fail("Expected exception for conflict between LATEST and ORDER BY TIMESERIES");
      } catch (SQLException e) {
        String message = e.getMessage().toLowerCase();
        assertTrue(message.contains("latest") && message.contains("order by timeseries"));
      }
    }
  }

  @Test
  public void testOrderByWithTimeCondition() throws Exception {
    executeSqls(
        "CREATE DATABASE root.db1",
        String.format(CREATE_TIMESERIES_FORMAT, "root.db1.devA.s1"),
        String.format(CREATE_TIMESERIES_FORMAT, "root.db1.devA.s2"),
        String.format(CREATE_TIMESERIES_FORMAT, "root.db1.devB.s1"));

    // devA.s1 has points in [1..5], devA.s2 only [1..3], devB.s1 only [4..5].
    List<String> inserts = new ArrayList<>();
    for (int t = 1; t <= 5; t++) {
      inserts.add(
          String.format("insert into root.db1.devA(timestamp, s1) values (%d, %d)", t, t));
    }
    for (int t = 1; t <= 3; t++) {
      inserts.add(
          String.format("insert into root.db1.devA(timestamp, s2) values (%d, %d)", t, t));
    }
    for (int t = 4; t <= 5; t++) {
      inserts.add(
          String.format("insert into root.db1.devB(timestamp, s1) values (%d, %d)", t, t));
    }
    executeSqls(inserts.toArray(new String[0]));

    // Only series with data after time 3 qualify, returned in descending path order.
    assertEquals(
        Arrays.asList("root.db1.devB.s1", "root.db1.devA.s1"),
        readTimeseriesColumn(
            "show timeseries root.db1.** where time > 3 order by timeseries desc"));
  }

  @Test
  public void testWhereClauseOffsetAppliedAfterFilter() throws Exception {
    executeSqls(
        "CREATE DATABASE root.ln",
        String.format(CREATE_TIMESERIES_FORMAT, "root.ln.wf01.wt01.status"),
        String.format(CREATE_TIMESERIES_FORMAT, "root.ln.wf02.wt01.status"),
        String.format(CREATE_TIMESERIES_FORMAT, "root.ln.wf02.wt02.status"));

    // The filter matches two series; offset 1 must skip within the filtered set only.
    assertEquals(
        Collections.singletonList("root.ln.wf02.wt02.status"),
        readTimeseriesColumn(
            "show timeseries root.ln.** where timeseries contains 'wf02.wt' order by timeseries offset 1 limit 1"));
  }

  @Test
  public void testAlterTemplateUpdatesOffsetOrder() throws Exception {
    executeSqls(
        "CREATE DATABASE root.sg1",
        "create device template t1 (s1 INT32, s0 INT32)",
        "set device template t1 to root.sg1.d1",
        "create timeseries using device template on root.sg1.d1",
        "set device template t1 to root.sg1.d2",
        "create timeseries using device template on root.sg1.d2");

    List<String> before =
        readTimeseriesColumn(
            "show timeseries root.sg1.** order by timeseries desc offset 2 limit 2");
    assertEquals(Arrays.asList("root.sg1.d1.s1", "root.sg1.d1.s0"), before);

    // Adding a measurement to the template must shift the paginated result accordingly.
    executeSqls("alter device template t1 add (s00 INT32)");

    List<String> after =
        readTimeseriesColumn("show timeseries root.sg1.** order by timeseries offset 3 limit 3");
    assertEquals(Arrays.asList("root.sg1.d2.s0", "root.sg1.d2.s00", "root.sg1.d2.s1"), after);
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,12 @@ showDevices

// ---- Show Timeseries
showTimeseries
: SHOW LATEST? TIMESERIES prefixPath? timeseriesWhereClause? timeConditionClause? rowPaginationClause?
: SHOW LATEST? TIMESERIES prefixPath? timeseriesWhereClause? timeConditionClause? orderByTimeseriesClause? rowPaginationClause?
;

// order by timeseries for SHOW TIMESERIES
orderByTimeseriesClause
: ORDER BY TIMESERIES (ASC | DESC)?
;

// ---- Show Child Paths
Expand Down Expand Up @@ -1586,4 +1591,4 @@ subStringExpression

signedIntegerLiteral
: (PLUS|MINUS)?INTEGER_LITERAL
;
;
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@
import org.apache.iotdb.commons.schema.table.TsTable;
import org.apache.iotdb.commons.schema.table.TsTableInternalRPCType;
import org.apache.iotdb.commons.schema.table.TsTableInternalRPCUtil;
import org.apache.iotdb.commons.schema.template.Template;
import org.apache.iotdb.commons.schema.view.ViewType;
import org.apache.iotdb.commons.schema.view.viewExpression.ViewExpression;
import org.apache.iotdb.commons.service.metric.MetricService;
Expand Down Expand Up @@ -186,6 +187,7 @@
import org.apache.iotdb.db.schemaengine.table.DataNodeTableCache;
import org.apache.iotdb.db.schemaengine.template.ClusterTemplateManager;
import org.apache.iotdb.db.schemaengine.template.TemplateInternalRPCUpdateType;
import org.apache.iotdb.db.schemaengine.template.TemplateInternalRPCUtil;
import org.apache.iotdb.db.service.DataNode;
import org.apache.iotdb.db.service.RegionMigrateService;
import org.apache.iotdb.db.service.externalservice.ExternalServiceManagementService;
Expand Down Expand Up @@ -2571,7 +2573,19 @@ public TSStatus updateTemplate(final TUpdateTemplateReq req) {
ClusterTemplateManager.getInstance().commitTemplatePreSetInfo(req.getTemplateInfo());
break;
case UPDATE_TEMPLATE_INFO:
ClusterTemplateManager.getInstance().updateTemplateInfo(req.getTemplateInfo());
Template newTemplate =
TemplateInternalRPCUtil.parseUpdateTemplateInfoBytes(
ByteBuffer.wrap(req.getTemplateInfo()));
Template oldTemplate =
ClusterTemplateManager.getInstance().getTemplate(newTemplate.getId());
ClusterTemplateManager.getInstance().updateTemplateInfo(newTemplate);
long delta =
newTemplate.getMeasurementNumber()
- (oldTemplate == null ? 0 : oldTemplate.getMeasurementNumber());
if (delta != 0) {
SchemaEngine.getInstance()
.updateSubtreeMeasurementCountForTemplate(newTemplate.getId(), delta);
}
break;
default:
LOGGER.warn("Unsupported type {} when updating template", req.type);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,8 @@ public ISchemaReader<ITimeSeriesSchemaInfo> getSchemaReader(ISchemaRegion schema
SchemaFilterFactory.and(
schemaFilter, SchemaFilterFactory.createViewTypeFilter(ViewType.VIEW)),
true,
scope));
scope,
null));
} catch (MetadataException e) {
throw new SchemaExecutionException(e.getMessage(), e);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import org.apache.iotdb.commons.schema.table.TsTable;
import org.apache.iotdb.commons.schema.table.column.TsTableColumnSchema;
import org.apache.iotdb.commons.schema.template.Template;
import org.apache.iotdb.db.queryengine.plan.statement.component.Ordering;
import org.apache.iotdb.db.schemaengine.schemaregion.read.resp.info.IDeviceSchemaInfo;
import org.apache.iotdb.db.schemaengine.schemaregion.read.resp.info.INodeSchemaInfo;
import org.apache.iotdb.db.schemaengine.schemaregion.read.resp.info.ITimeSeriesSchemaInfo;
Expand All @@ -47,7 +48,7 @@ public static ISchemaSource<ITimeSeriesSchemaInfo> getTimeSeriesSchemaCountSourc
Map<Integer, Template> templateMap,
PathPatternTree scope) {
return new TimeSeriesSchemaSource(
pathPattern, isPrefixMatch, 0, 0, schemaFilter, templateMap, false, scope);
pathPattern, isPrefixMatch, 0, 0, schemaFilter, templateMap, false, scope, null);
}

// show time series
Expand All @@ -58,9 +59,18 @@ public static ISchemaSource<ITimeSeriesSchemaInfo> getTimeSeriesSchemaScanSource
long offset,
SchemaFilter schemaFilter,
Map<Integer, Template> templateMap,
PathPatternTree scope) {
PathPatternTree scope,
Ordering timeseriesOrdering) {
return new TimeSeriesSchemaSource(
pathPattern, isPrefixMatch, limit, offset, schemaFilter, templateMap, true, scope);
pathPattern,
isPrefixMatch,
limit,
offset,
schemaFilter,
templateMap,
true,
scope,
timeseriesOrdering);
}

// count device
Expand Down
Loading