Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
95 commits
Select commit Hold shift + click to select a range
4f9843a
Update driver to use validTime for feature as a specific column as ts…
mdhsl Sep 24, 2025
b0db77c
Remove useless classes
mdhsl Sep 24, 2025
bc8fb87
Fix wrong selectLastVersionByUidQuery() function for system
mdhsl Sep 24, 2025
31da427
Do not insert a new system if it already exists: check the uniqueId
mdhsl Sep 24, 2025
c7f91de
Fix datastream issues with validTime
mdhsl Sep 25, 2025
4c90585
Fix datastreams validTime
mdhsl Sep 25, 2025
2cc3779
Fix issue with non thread safe batchPrepareStatement object; improve …
mdhsl Sep 26, 2025
3de0277
Add FoiFilter to filter Obs and fix PostgisFeature unit tests
mdhsl Oct 1, 2025
5d47070
Increase timeout and connection max life time
mdhsl Oct 7, 2025
1b76079
Update driver to use validTime for feature as a specific column as ts…
mdhsl Sep 24, 2025
d7f2e4e
Remove useless classes
mdhsl Sep 24, 2025
f1f1d1c
Fix wrong selectLastVersionByUidQuery() function for system
mdhsl Sep 24, 2025
c98b862
Do not insert a new system if it already exists: check the uniqueId
mdhsl Sep 24, 2025
21e2f9e
Fix datastream issues with validTime
mdhsl Sep 25, 2025
ec0a20b
Fix datastreams validTime
mdhsl Sep 25, 2025
7412f03
Fix issue with non thread safe batchPrepareStatement object; improve …
mdhsl Sep 26, 2025
18ff4f5
Add FoiFilter to filter Obs and fix PostgisFeature unit tests
mdhsl Oct 1, 2025
925d872
Increase timeout and connection max life time
mdhsl Oct 7, 2025
6877aba
Update command status serialization for osh-core changes and add inli…
earocorn Oct 10, 2025
94ac11f
Merge pull request #125 from earocorn/cmd-status-serialization
mdhsl Oct 10, 2025
73d0eb7
Fix BigId due to osh-core changes on command API
mdhsl Oct 20, 2025
87d4828
Fix merge
mdhsl Oct 20, 2025
2777dd8
Use config to specify batch mode
mdhsl Oct 22, 2025
b2b8688
Clear the ThreadSafeBatchExecutor after closing connection
mdhsl Oct 23, 2025
4d41300
Fix drop() into Postgis store add Add support for streaming into Feat…
mdhsl Oct 23, 2025
30f8565
Centralize the use of JDBC connection; add streaming data for OBS, FE…
mdhsl Oct 24, 2025
cda8bde
Add support of obsFilter for FOI queries
mdhsl Oct 25, 2025
9313123
Speed up the batch mode by using addBatch for Feature
mdhsl Oct 25, 2025
a1c9cdd
Change logic when using InnerJoin to add condition directly after the…
mdhsl Oct 28, 2025
6c96e76
Add support for batch removing into obs table; Use validTime into Foi…
mdhsl Oct 28, 2025
0bad812
Fix wrong time range comparison operator into foi filter
mdhsl Oct 28, 2025
a9a7176
Use PostgisFeatureKey to store parentId and change the way to generat…
mdhsl Oct 29, 2025
eff5f66
Fix autoCommit period; Fix some SQL requests for Feature Store
mdhsl Oct 29, 2025
f02557c
Add unique constraint and update Obs query to avoid duplicated inser…
mdhsl Oct 29, 2025
bf2346a
Improve obs insertion
mdhsl Oct 29, 2025
f64c19e
Fix most unit tests
earocorn Oct 30, 2025
2ac7941
Check existing key only once
earocorn Oct 30, 2025
4d5c8a3
Merge pull request #130 from earocorn/fix-feature-and-other-tests
mdhsl Oct 30, 2025
af4634f
Remove hack to pass time of obsFilter to foiFilter
mdhsl Oct 30, 2025
73f289c
Merge branch 'fix-postgis-datastore' of github.com:opensensorhub/osh-…
mdhsl Oct 30, 2025
e74bc58
Fix FOI parent issue
earocorn Oct 31, 2025
0957cd9
Create the idProvider depending on the type of idProviderType and not…
mdhsl Oct 31, 2025
b51ecd4
Merge pull request #131 from earocorn/fix-feature-and-other-tests
mdhsl Oct 31, 2025
1673092
Merge branch 'fix-postgis-datastore' of github.com:opensensorhub/osh-…
mdhsl Oct 31, 2025
981e338
Merge branch 'fix-postgis-datastore-2' into fix-postgis-datastore
mdhsl Oct 31, 2025
ee54d84
Add support for UID_HASH id provider
mdhsl Oct 31, 2025
577c89f
Refactor QueryBuilder to take into account removeEntries(Filter)
mdhsl Nov 4, 2025
1fd6512
Add lock while inserting & removing entries
mdhsl Nov 4, 2025
613d504
Fix getCurrentVersion of feature store
earocorn Nov 4, 2025
3bc5286
Merge branch 'fix-postgis-datastore' into fix-current-version
earocorn Nov 4, 2025
0c38d9c
Merge pull request #132 from earocorn/fix-current-version
mdhsl Nov 4, 2025
cb16f12
Remove workaround which added extra foi validtime to filter obs request
mdhsl Nov 4, 2025
d44b825
Return obs filtered by ASC Time order
mdhsl Nov 6, 2025
a652fdb
Return obs filtered by ASC Time order
mdhsl Nov 6, 2025
7534dbe
Use Sub class for Batch Obs instead of managing internal boolean to u…
mdhsl Nov 6, 2025
2cf8844
Create index on Foi id
mdhsl Nov 6, 2025
7f27e42
Create index on Foi id
mdhsl Nov 6, 2025
ba03ee2
Create index on Foi id into obs table
mdhsl Nov 6, 2025
b9bdaa1
Create index on Foi id into obs table
mdhsl Nov 6, 2025
5240c8e
Add new BatchObDatabase
mdhsl Nov 6, 2025
54a8e9d
Avoid datastream JOIN if ObsFilter provide only datastream id
mdhsl Nov 6, 2025
d1f0634
Optimize some UID search by using = when there is no regex inside and…
mdhsl Nov 7, 2025
046c7d1
Update the way to use autoCommit period
mdhsl Nov 7, 2025
a4399ad
Optimize obs query by using = instead of IN when the list of queried …
mdhsl Nov 7, 2025
19860e7
Increase connection timeout
mdhsl Nov 9, 2025
33d7a2a
working on updates to prevent doa from stopping
BillBrown341 Oct 6, 2025
ce36b41
Updates to fix DOA Output from stopping
BillBrown341 Oct 6, 2025
d60962e
[FltAware] Add flight ID to output record
alexrobin Oct 28, 2025
1bb0785
Add fileserver
earocorn Oct 29, 2025
0237a3f
Fix PostGIS config names
earocorn Nov 4, 2025
7756a8e
Fix command results, command stream/datastream filtering by valid tim…
earocorn Nov 5, 2025
c2e11bf
Fix system queries and synchronize on obs store actions
earocorn Nov 6, 2025
85fd5d4
Throw exception for adding existing feature with existing valid time
earocorn Nov 7, 2025
6b16d97
Use JSON instead of JSONB for command results to keep JSON structure
earocorn Nov 7, 2025
81392cb
Refactored command status store from JSONB to JSON for results. Refac…
earocorn Nov 8, 2025
122abf2
Merge branch 'postgis-commands' into fix-postgis-commands
earocorn Nov 11, 2025
8b72ad4
Fix fileserver merge error
earocorn Nov 11, 2025
d470bf8
Fileserver license
earocorn Nov 11, 2025
dbddfbb
Update scope and refactor to be overridden by subclasses
mdhsl Nov 13, 2025
dfbea3e
Add distinct filter to SQL query for valid time filter on Command Str…
earocorn Nov 13, 2025
50af73b
Use (datastreamID, resultTime/phenomenonTime) for index to optimize o…
earocorn Nov 17, 2025
24ee730
Update name for obs index
earocorn Nov 17, 2025
cfa6097
Fix some indexes and improve genericity
mdhsl Nov 18, 2025
3fbfa1e
Replace Timestamp With TZ by Timestamp and insert time as UTC
mdhsl Nov 18, 2025
bc5ff1d
Replace Timestamp With TZ by Timestamp and insert time as UTC
mdhsl Nov 18, 2025
ebbbaa1
Fix merge
earocorn Nov 21, 2025
918a820
Upgrade connection pool size to 50. Fix system valid time filter
earocorn Nov 21, 2025
2e45f6c
Synchronize on all add/put/remove. Query DataStream time ranges in se…
earocorn Nov 21, 2025
9cb8030
Fix value predicate paging
earocorn Nov 24, 2025
754b97e
Remove synchronized methods
earocorn Nov 25, 2025
e9334f9
Use JSONB for obs table and reorder upon deserialization
earocorn Nov 25, 2025
8c41a2d
Remove limit cap when using val predicate
earocorn Nov 25, 2025
5d9b416
Fix issue with AsynchronousSocketChannel on Windows
earocorn Nov 26, 2025
8a08079
Merge remote-tracking branch 'alex/fix-postgis-commands' into OSCAR
earocorn Nov 26, 2025
726f59f
Synchronize and null check in FFmpeg transcoding
earocorn Nov 27, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,8 @@ private HikariDataSource createHikariDataSource() {
config.setJdbcUrl("jdbc:postgresql://" + url + "/" + dbName);
config.setUsername(login);
config.setPassword(password);
config.setMaximumPoolSize(5);
// TODO: Set using postgis config
config.setMaximumPoolSize(50);
config.setConnectionTimeout(1000 * 60 * 5); // 5 minutes

// config.setMaximumPoolSize(200_000);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,14 @@
public class IteratorResultSet<T> implements Iterator<T> {
private static final Logger logger = LoggerFactory.getLogger(IteratorResultSet.class);

private long limit = Long.MAX_VALUE;
private long internalLimit;

private long filterLimit;

private long offset = 0;

private long deliveredCount = 0;

private String query;

private ConnectionManager connectionManager;
Expand All @@ -48,71 +52,136 @@ public class IteratorResultSet<T> implements Iterator<T> {

public IteratorResultSet(String query,
ConnectionManager connectionManager,
long limit,
long internalLimit,
long filterLimit,
Function<ResultSet, T> parsingFn,
Function<T, Boolean> predicateValidator
Function<T, Boolean> predicateValidator,
boolean hasValuePredicate
) {
this.query = query;
this.limit = limit;
this.query = !hasValuePredicate ? query : removeSqlLimit(query);
this.internalLimit = internalLimit;
this.filterLimit = filterLimit;
this.parsingFn = parsingFn;
this.connectionManager = connectionManager;
this.predicateValidator = predicateValidator;
this.useInternalLimit = !query.contains("LIMIT");
this.useInternalLimit = !query.contains("LIMIT") || hasValuePredicate;
}

@Override
public boolean hasNext() {
if (deliveredCount >= filterLimit) {
return false;
}
if(!records.isEmpty()) {
return true;
}
if (ended) {
return false;
}
while(records.isEmpty() && !ended) {
while(records.isEmpty() && !ended && deliveredCount < filterLimit ) {
this.makeRequest();
}
return !records.isEmpty();
}

private String getQuery() {
if(useInternalLimit) {
return query + " LIMIT " + limit + " OFFSET " + offset;
return query + " LIMIT " + internalLimit + " OFFSET " + offset;
} else {
// limit set by the filter itself
return query + " OFFSET " + offset;
}
}

private String removeSqlLimit(String statement) {
    // Strip any trailing "LIMIT n [OFFSET m]" clause (case-insensitive) so paging
    // can be driven internally when a value predicate must be applied client-side.
    final String limitClause = "(?i)\\s+LIMIT\\s+\\d+(\\s+OFFSET\\s+\\d+)?";
    return statement.replaceAll(limitClause, "");
}


@Override
public T next() {
return records.poll();
T value = records.poll();
if (value != null) {
deliveredCount++;
}
return value;
}

private void makeRequest() {
long countRes = 0;
long fetchedFromDb = 0;

try (Connection connection = connectionManager.getConnection()) {
try(Statement statement = connection.createStatement()) {
try (Statement statement = connection.createStatement()) {
String nextQuery = getQuery();
if(logger.isDebugEnabled()) {
if (logger.isDebugEnabled()) {
logger.debug(nextQuery);
}
try (ResultSet resultSet = statement.executeQuery(nextQuery)){
try (ResultSet resultSet = statement.executeQuery(nextQuery)) {
while (resultSet.next()) {
countRes++;
T res = this.parsingFn.apply(resultSet);
if(predicateValidator.apply(res)) {
fetchedFromDb++;

if (deliveredCount + records.size() >= filterLimit) {
ended = true;
break;
}

T res = parsingFn.apply(resultSet);

if (predicateValidator.apply(res)) {
records.add(res);
if (deliveredCount + records.size() >= filterLimit) {
ended = true;
break;
}
}
}
offset += limit;
}
}
if(countRes == 0 || countRes < limit) {

// Move offset only when internal limit is active
if (useInternalLimit) {
offset += internalLimit;
} else {
// If SQL LIMIT is controlling the batch size, we must rely on SQL side
offset += filterLimit > 0 ? filterLimit : internalLimit;
}

// If DB returned fewer rows than internalLimit, this is the final page
if (fetchedFromDb == 0 ||
(useInternalLimit && fetchedFromDb < internalLimit)) {
ended = true;
}

} catch (SQLException e) {
throw new RuntimeException(e);
}
}


// private void makeRequest() {
// long countRes = 0;
// try (Connection connection = connectionManager.getConnection()) {
// try(Statement statement = connection.createStatement()) {
// String nextQuery = getQuery();
// if(logger.isDebugEnabled()) {
// logger.debug(nextQuery);
// }
// try (ResultSet resultSet = statement.executeQuery(nextQuery)){
// while (resultSet.next()) {
// countRes++;
// T res = this.parsingFn.apply(resultSet);
// if(predicateValidator.apply(res)) {
// records.add(res);
// }
// }
// offset += limit;
// }
// }
// if(countRes == 0 || countRes < limit) {
// ended = true;
// }
// } catch (SQLException e) {
// throw new RuntimeException(e);
// }
// }
}
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
import org.sensorhub.api.datastore.system.ISystemDescStore;

public abstract class QueryBuilder {
private final String tableName;
protected final String tableName;

protected IDataStreamStore dataStreamStore;
protected IProcedureStore procedureStore;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,15 @@

import java.util.Set;

import static org.sensorhub.api.datastore.command.ICommandStatusStore.CommandStatusField.COMMAND_ID;
import static org.sensorhub.api.datastore.command.ICommandStatusStore.CommandStatusField.*;

public class QueryBuilderCommandStatusStore extends QueryBuilder {
public final static String COMMAND_TABLE_NAME = "commandstatus";

protected static final String PROGRESS = "progress";
protected static final String MESSAGE = "message";
protected static final String RESULT = "result";

public QueryBuilderCommandStatusStore() {
super(COMMAND_TABLE_NAME);
}
Expand All @@ -37,17 +42,32 @@ public String createTableQuery() {
return "CREATE TABLE "+this.getStoreTableName()+
" (" +
"id BIGSERIAL PRIMARY KEY,"+
COMMAND_ID + " bigint, "+
"data jsonb"+
COMMAND_ID+" BIGINT, "+
PROGRESS+" INT, "+
REPORT_TIME+" TIMESTAMP, "+
STATUS_CODE+" VARCHAR, "+
EXEC_TIME+" tsrange, "+
MESSAGE+" VARCHAR, "+
RESULT+" json"+
")";
}

/**
 * Builds the INSERT statement for one command status row.
 * Placeholders are bound in table-column order:
 * commandid, progress, reportTime, statusCode, executionTime, message, result.
 */
public String insertCommandQuery() {
    // NOTE(review): a stale pre-refactor return ("(commandid, data) VALUES (?, ?)")
    // was left above this one by the diff/merge; only the 7-column form is valid
    // against the current table definition, so the duplicate has been removed.
    return "INSERT INTO "+this.getStoreTableName()+" " +
            "(commandid, progress, reportTime, statusCode, executionTime, message, result) " +
            "VALUES (?, ?, ?, ?, ?, ?, ?)";
}

/**
 * Builds the UPDATE statement that rewrites every persisted column of a
 * command status row, selected by primary key (last placeholder = id).
 */
public String updateByIdQuery() {
    // Fix: the previous form ended the SET list with "RESULT = ?, WHERE id = ?",
    // i.e. a trailing comma before WHERE, which is invalid SQL and would make
    // every update fail at the database. The comma is removed here.
    return "UPDATE "+this.getStoreTableName()+" SET "+
            COMMAND_ID+" = ?, " +
            PROGRESS+" = ?, " +
            REPORT_TIME+" = ?, " +
            STATUS_CODE+" = ?, " +
            EXEC_TIME+" = ?, " +
            MESSAGE+" = ?, " +
            RESULT+" = ? " +
            "WHERE id = ?";
}

public String createSelectEntriesQuery(CommandStatusFilter filter, Set<ICommandStatusStore.CommandStatusField> fields) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,16 +42,16 @@ public String createTableQuery() {
COMMANDSTREAM_ID +" BIGINT, "+
SENDER_ID+" VARCHAR,"+
FOI_ID+" bigint,"+
ISSUE_TIME+" TIMESTAMPTZ,"+
ISSUE_TIME+" TIMESTAMP,"+
STATUS+" VARCHAR,"+
ERROR_MSG+" VARCHAR,"+
PARAMETERS+" JSONB" +
PARAMETERS+" JSON" +
")";
}

public String createDataIndexQuery() {
return "CREATE INDEX "+this.getStoreTableName()+"_data_idx on "+this.getStoreTableName()+" USING GIN("+PARAMETERS+")";
}
// public String createDataIndexQuery() {
// return "CREATE INDEX "+this.getStoreTableName()+"_data_idx on "+this.getStoreTableName()+" ("+PARAMETERS+")";
// }

public String createCommandStreamIndexQuery() {
return "CREATE INDEX "+this.getStoreTableName()+"_commandstream_idx on "+this.getStoreTableName()+" ("+COMMANDSTREAM_ID+")";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import java.util.Set;

public class QueryBuilderCommandStreamStore extends QueryBuilder {
public final static String COMMAND_STREAM_TABLE_NAME = "commandstream";
public final static String COMMAND_STREAM_TABLE_NAME = "commandstreams";

public QueryBuilderCommandStreamStore() {
this(COMMAND_STREAM_TABLE_NAME);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,8 +89,8 @@ public String insertInfoQuery() {
}

/**
 * Builds a query returning the ids of all datastreams whose JSON validTime
 * range is fully contained (PostgreSQL <@ operator) in [min, max].
 * Uses tsrange/timestamp (without time zone) because timestamps are stored
 * as UTC since the TIMESTAMPTZ -> TIMESTAMP migration.
 */
public String getAllDataStreams(Instant min, Instant max) {
    // NOTE(review): the stale tstzrange/timestamptz variant of this return,
    // left above it by the diff, has been removed — only one return is reachable.
    return "SELECT id FROM "+this.getStoreTableName()+" WHERE tsrange((data->'validTime'->>'begin')::timestamp," +
            "(data->'validTime'->>'end')::timestamp) <@ '["+ PostgisUtils.checkAndGetValidInstant(min)+","+PostgisUtils.checkAndGetValidInstant(max)+"]'::tsrange";
}

public String createSelectEntriesQuery(DataStreamFilter filter, Set<IDataStreamStore.DataStreamInfoField> fields) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,35 +39,34 @@ public QueryBuilderObsStore(String tableName) {
}

public String createTableQuery() {
return "CREATE TABLE "+this.getStoreTableName()+
return "CREATE TABLE IF NOT EXISTS "+this.getStoreTableName()+
" (" +
"id BIGSERIAL PRIMARY KEY,"+
DATASTREAM_ID +" bigint, "+
FOI_ID+" bigint,"+
PHENOMENON_TIME+" TIMESTAMPTZ,"+
RESULT_TIME+" TIMESTAMPTZ,"+
RESULT+" JSON" + // VERSUS JSONB but the parser does not keep order
PHENOMENON_TIME+" TIMESTAMP,"+
RESULT_TIME+" TIMESTAMP,"+
RESULT+" JSONB" + // VERSUS JSONB but the parser does not keep order
")";
}
public String createDataIndexQuery() {
return "CREATE INDEX "+this.getStoreTableName()+"_data_idx on "+this.getStoreTableName()+" USING GIN("+RESULT+")";
// return "CREATE INDEX "+this.getStoreTableName()+"_data_idx on "+this.getStoreTableName()+" ("+RESULT+")";
}

public String createDataStreamIndexQuery() {
return "CREATE INDEX "+this.getStoreTableName()+"_datastream_idx on "+this.getStoreTableName()+" ("+DATASTREAM_ID+")";
return "CREATE INDEX IF NOT EXISTS "+this.getStoreTableName()+"_datastream_idx on "+this.getStoreTableName()+" ("+DATASTREAM_ID+")";
}

public String createPhenomenonTimeIndexQuery() {
return "CREATE INDEX "+this.getStoreTableName()+"_phenomenon_time_idx on "+this.getStoreTableName()+" ("+ PHENOMENON_TIME +")";
return "CREATE INDEX IF NOT EXISTS "+this.getStoreTableName()+"_datastream_id_phenomenon_time_idx on "+this.getStoreTableName()+" ("+ DATASTREAM_ID + ", " + PHENOMENON_TIME +")";
}

public String createResultTimeIndexQuery() {
return "CREATE INDEX "+this.getStoreTableName()+"_result_time_idx on "+this.getStoreTableName()+" ("+ RESULT_TIME +")";
return "CREATE INDEX IF NOT EXISTS "+this.getStoreTableName()+"_datastream_id_result_time_idx on "+this.getStoreTableName()+" ("+ DATASTREAM_ID + ", " + RESULT_TIME +")";
}

public String createFoiIndexQuery() {
return "CREATE INDEX "+this.getStoreTableName()+"_foi_idx on "+this.getStoreTableName()+" ("+ FOI_ID +")";
return "CREATE INDEX IF NOT EXISTS "+this.getStoreTableName()+"_foi_idx on "+this.getStoreTableName()+" ("+ FOI_ID +")";
}

public String insertObsQuery() {
Expand All @@ -77,7 +76,7 @@ public String insertObsQuery() {
}

public String createUniqueConstraint() {
return "CREATE UNIQUE INDEX "+this.getStoreTableName()+"_unique_constraint on "+this.getStoreTableName()+" (dataStreamID, foiID, phenomenonTime, resultTime)";
return "CREATE UNIQUE INDEX IF NOT EXISTS "+this.getStoreTableName()+"_unique_constraint on "+this.getStoreTableName()+" (dataStreamID, foiID, phenomenonTime, resultTime)";
}

public String updateByIdQuery() {
Expand All @@ -93,6 +92,15 @@ public String getPhenomenonTimeRangeByDataStreamIdQuery(long dataStreamID) {
return "SELECT Min("+PHENOMENON_TIME+"),Max("+PHENOMENON_TIME+") FROM "+this.getStoreTableName()+" WHERE "+DATASTREAM_ID+" = "+dataStreamID;
}

/**
 * Builds a grouped min/max phenomenon-time query for a set of datastreams.
 * The datastream ids are bound as a single SQL array parameter (= ANY (?)),
 * so all ranges are fetched in one round trip instead of one query per id.
 */
public String getPhenomenonTimeRangeByDataStreamIdsQuery() {
    StringBuilder sql = new StringBuilder("SELECT ").append(DATASTREAM_ID).append(", ")
            .append(" MIN(").append(PHENOMENON_TIME).append(") AS min, ")
            .append(" MAX(").append(PHENOMENON_TIME).append(") AS max ")
            .append("FROM ").append(getStoreTableName())
            .append(" WHERE ").append(DATASTREAM_ID).append(" = ANY (?) ")
            .append("GROUP BY ").append(DATASTREAM_ID);
    return sql.toString();
}

public String getBinCountByPhenomenontime(long seconds, List<Long> dsIds, List<Long> foiIds) {
StringBuilder idsQuery = new StringBuilder();
for(int i=0;i < dsIds.size();i++) {
Expand All @@ -118,6 +126,15 @@ public String getResultTimeRangeByDataStreamIdQuery(long dataStreamID) {
return "SELECT Min("+RESULT_TIME+"),Max("+RESULT_TIME+") FROM "+this.getStoreTableName()+" WHERE "+DATASTREAM_ID+" = "+dataStreamID;
}

/**
 * Builds a grouped min/max result-time query for a set of datastreams.
 * Mirrors getPhenomenonTimeRangeByDataStreamIdsQuery: ids are bound as one
 * SQL array parameter (= ANY (?)) and aggregated per datastream id.
 */
public String getResultTimeRangeByDataStreamIdsQuery() {
    StringBuilder sql = new StringBuilder("SELECT ").append(DATASTREAM_ID).append(", ")
            .append(" MIN(").append(RESULT_TIME).append(") AS min, ")
            .append(" MAX(").append(RESULT_TIME).append(") AS max ")
            .append("FROM ").append(getStoreTableName())
            .append(" WHERE ").append(DATASTREAM_ID).append(" = ANY (?) ")
            .append("GROUP BY ").append(DATASTREAM_ID);
    return sql.toString();
}

public String countByPhenomenonTimeRangeQuery(Instant min, Instant max) {
String minTimestamp = min.toString();
if(min == Instant.MIN) {
Expand All @@ -127,8 +144,8 @@ public String countByPhenomenonTimeRangeQuery(Instant min, Instant max) {
if(max == Instant.MAX) {
maxTimestamp = "infinity";
}
String sb = "tstzrange((" + this.getStoreTableName() + "."+PHENOMENON_TIME+")::timestamptz," +
" (" + this.getStoreTableName() + "."+PHENOMENON_TIME+")::timestamptz)" +
String sb = "tsrange((" + this.getStoreTableName() + "."+PHENOMENON_TIME+")::timestamp," +
" (" + this.getStoreTableName() + "."+PHENOMENON_TIME+")::timestamp)" +
" && '[" + minTimestamp + "," + maxTimestamp + "]'";

return "SELECT COUNT(*) FROM "+this.getStoreTableName()+" "+sb;
Expand Down
Loading