@@ -12,6 +12,9 @@
@Component
@ConfigurationProperties(prefix = "wfs-default-param")
public class WfsDefaultParam {

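/** Marker used by WfsServer to identify WFS links via a link's aiGroup attribute. */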
public static final String WFS_LINK_MARKER = "Data Access > wfs";

private Map<String, String> fields;
private Map<String, String> download;
}
@@ -29,6 +29,7 @@
import java.util.List;
import java.util.Optional;
import static au.org.aodn.ogcapi.server.core.configuration.CacheConfig.DOWNLOADABLE_FIELDS;
import static au.org.aodn.ogcapi.server.core.service.wfs.WfsDefaultParam.WFS_LINK_MARKER;

@Slf4j
public class WfsServer {
@@ -111,7 +112,7 @@ protected Optional<List<String>> getAllFeatureServerUrls(String collectionId) {
.filter(link -> link.getAiGroup() != null)
.filter(link ->
// This is the pattern for wfs link
link.getAiGroup().contains("Data Access > wfs") ||
link.getAiGroup().contains(WFS_LINK_MARKER) ||
// The data itself can be unclean; an ows endpoint is another option that works with wfs
link.getHref().contains("/ows")
)
@@ -136,7 +137,7 @@ public Optional<String> getFeatureServerUrlByTitle(String collectionId, String l
return model.getLinks()
.stream()
.filter(link -> link.getAiGroup() != null)
.filter(link -> link.getAiGroup().contains("Data Access > wfs") && link.getTitle().equalsIgnoreCase(layerName))
.filter(link -> link.getAiGroup().contains(WFS_LINK_MARKER) && link.getTitle().equalsIgnoreCase(layerName))
.map(LinkModel::getHref)
.findFirst();
} else {
@@ -157,7 +158,7 @@ public Optional<String> getFeatureServerUrlByTitleOrQueryParam(String collection
return model.getLinks()
.stream()
.filter(link -> link.getAiGroup() != null)
.filter(link -> link.getAiGroup().contains("Data Access > wfs"))
.filter(link -> link.getAiGroup().contains(WFS_LINK_MARKER))
.filter(link -> {
Optional<String> name = extractTypenameFromUrl(link.getHref());
return link.getTitle().equalsIgnoreCase(layerName) ||
@@ -247,7 +248,7 @@ public List<LayerInfo> filterLayersByWfsLinks(String collectionId, List<LayerInf
List<LinkModel> wfsLinks = model.getLinks()
.stream()
.filter(link -> link.getAiGroup() != null)
.filter(link -> link.getAiGroup().contains("Data Access > wfs"))
.filter(link -> link.getAiGroup().contains(WFS_LINK_MARKER))
.toList();

if (wfsLinks.isEmpty()) {
@@ -12,6 +12,9 @@
@Component
@ConfigurationProperties(prefix = "wms-default-param")
public class WmsDefaultParam {

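/** Marker used by WmsServer to identify WMS links via a link's aiGroup attribute. */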
public static final String WMS_LINK_MARKER = "Data Access > wms";

private Map<String, String> wfs;
private Map<String, String> ncwfs;

@@ -35,6 +35,7 @@
import java.util.stream.Stream;

import static au.org.aodn.ogcapi.server.core.configuration.CacheConfig.CACHE_WMS_MAP_TILE;
import static au.org.aodn.ogcapi.server.core.service.wms.WmsDefaultParam.WMS_LINK_MARKER;

@Slf4j
public class WmsServer {
@@ -65,63 +66,90 @@ public WmsServer() {
xmlMapper.registerModule(new JavaTimeModule()); // Add JavaTimeModule
xmlMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
}

/**
* This function appends the CQL filter to the geonetwork query. It guesses the correct dateTime field by
* some heuristics, so that filtering by a date range works. In case of issues please debug the logic here, as we are
* dealing with various non-standard field names.
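* For example (illustrative values only, not taken from a real record): with an existing WFS cql_filter of
* site_code='NRSMAI', dateTime fields named time_coverage_start and time_coverage_end, and a requested
* datetime of 2020-01-01/2020-12-31, the returned filter would be
* CQL_FILTER=time_coverage_start >= 2020-01-01 AND time_coverage_end <= 2020-12-31 AND site_code='NRSMAI'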
* @param uuid - The uuid of metadata
* @param request - The request object to the map
* @return - The CQL filter combining the WFS CQL and the dateTime query.
*/
protected String createCQLFilter(String uuid, FeatureRequest request) {
String cql = "";

// If the metadata record has a wfs url, we will use it, analyse it and extract the CQL part if it exists
Optional<String> wfsUrl = wfsServer.getFeatureServerUrlByTitleOrQueryParam(uuid, request.getLayerName());
if(wfsUrl.isPresent()) {
UriComponents wfsUrlComponents = UriComponentsBuilder.fromUriString(wfsUrl.get()).build();
// Extract the CQL if it exists in the WFS url; we need to apply it to the WMS as well
if(wfsUrlComponents.getQueryParams().get("cql_filter") != null) {
cql = wfsUrlComponents.getQueryParams().get("cql_filter").get(0);
}
else if(wfsUrlComponents.getQueryParams().get("CQL_FILTER") != null) {
cql = wfsUrlComponents.getQueryParams().get("CQL_FILTER").get(0);
}
}
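// Illustrative example (hypothetical URL and values): for a WFS link like
// http://geoserver.example.org/geoserver/ows?service=WFS&typename=layer&CQL_FILTER=...
// the extracted cql is the value of its cql_filter/CQL_FILTER query parameter, e.g. site_code='NRSMAI',
// which is later combined with the dateTime clause built below.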

if (request.getDatetime() != null) {
// Special handling for the date time field: the field name differs across datasets, so we need
// to look it up
String cql = "";
try {
Optional<String> wfsUrl = wfsServer.getFeatureServerUrlByTitleOrQueryParam(uuid, request.getLayerName());
if(wfsUrl.isPresent()) {
UriComponents wfsUrlComponents = UriComponentsBuilder.fromUriString(wfsUrl.get()).build();
// Extract the CQL if existing in the WFS, we need to apply it to the WMS as well
if(wfsUrlComponents.getQueryParams().get("cql_filter") != null) {
cql = wfsUrlComponents.getQueryParams().get("cql_filter").get(0) + " AND ";
}
else if(wfsUrlComponents.getQueryParams().get("CQL_FILTER") != null) {
cql = wfsUrlComponents.getQueryParams().get("CQL_FILTER").get(0) + " AND ";
}
}

List<DownloadableFieldModel> m = this.getDownloadableFields(uuid, request);
List<DownloadableFieldModel> target = m.stream()
.filter(value -> "dateTime".equalsIgnoreCase(value.getType()))
.toList();

if (!target.isEmpty()) {

List<DownloadableFieldModel> range;
if (target.size() > 2) {
// Try to find possible fields whose names contain start/end/min/max
target = target.stream()
range = target.stream()
.filter(v -> Stream.of("start", "end", "min", "max").anyMatch(k -> v.getName().contains(k)))
.toList();
}

if (target.size() == 2) {
// Due to no standard name, we try our best to guess if 2 dateTime field
String[] d = request.getDatetime().split("/");
String guess1 = target.get(0).getName();
String guess2 = target.get(1).getName();
if ((guess1.contains("start") || guess1.contains("min")) && (guess2.contains("end") || guess2.contains("max"))) {
return String.format("CQL_FILTER=%s%s >= %s AND %s <= %s", cql, guess1, d[0], guess2, d[1]);
}
if ((guess2.contains("start") || guess2.contains("min")) && (guess1.contains("end") || guess1.contains("max"))) {
return String.format("CQL_FILTER=%s%s >= %s AND %s <= %s", cql, guess2, d[0], guess2, d[1]);
if (range.size() == 2) {
// Since there is no standard name, we try our best to guess when there are 2 dateTime fields; range means we found a start/end date pair
String[] d = request.getDatetime().split("/");
String guess1 = target.get(0).getName();
String guess2 = target.get(1).getName();

if ((guess1.contains("start") || guess1.contains("min")) && (guess2.contains("end") || guess2.contains("max"))) {
String timeCql = String.format("CQL_FILTER=%s >= %s AND %s <= %s", guess1, d[0], guess2, d[1]);
return "".equalsIgnoreCase(cql) ? timeCql : timeCql + " AND " + cql;
}
if ((guess2.contains("start") || guess2.contains("min")) && (guess1.contains("end") || guess1.contains("max"))) {
String timeCql = String.format("CQL_FILTER=%s >= %s AND %s <= %s", guess2, d[0], guess1, d[1]);
return "".equalsIgnoreCase(cql) ? timeCql : timeCql + " AND " + cql;
}
return "".equalsIgnoreCase(cql) ? "" : cql;
} else {
// There is more than one dateTime field but it is not a range type, so we try to guess the individual one
// based on some common names. Add more if needed
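// Illustrative example (hypothetical values): a single matching field named JULD with a requested
// datetime of 2010-01-01/2010-12-31 would produce CQL_FILTER=JULD DURING 2010-01-01/2010-12-31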
List<DownloadableFieldModel> individual = target.stream()
.filter(v -> Stream.of("juld", "time").anyMatch(k -> v.getName().equalsIgnoreCase(k)))
.toList();

if(individual.size() == 1) {
log.debug("Map datetime field to name to [{}]", individual.get(0).getName());
String timeCql = String.format("CQL_FILTER=%s DURING %s", individual.get(0).getName(), request.getDatetime());
return "".equalsIgnoreCase(cql) ? timeCql : timeCql + " AND " + cql;
}
}
} else {
// Only 1 field so use it.
log.debug("Map datetime field to name to [{}]", target.get(0).getName());
return String.format("CQL_FILTER=%s%s DURING %s", cql, target.get(0).getName(), request.getDatetime());
}
else if(target.size() == 1) {
log.debug("Map datetime field to name to the only dateTime field [{}]", target.get(0).getName());
String timeCql = String.format("CQL_FILTER=%s DURING %s", target.get(0).getName(), request.getDatetime());
return "".equalsIgnoreCase(cql) ? timeCql : timeCql + " AND " + cql;
}
}
log.error("No date time field found from query for uuid {}, result will not be bounded by date time", uuid);
} catch (DownloadableFieldsNotFoundException dfnf) {
// Without field, we cannot create a valid CQL filte targeting a dateTime, so just return empty
return "";
log.error("No date time field found for uuid {}, result will not be bounded by date time even specified", uuid);
}
catch (DownloadableFieldsNotFoundException dfnf) {
// Without the field, we cannot create a valid CQL filter targeting a dateTime, so just return the existing CQL if any
}
}
return "";
return "".equalsIgnoreCase(cql) ? "" : String.format("CQL_FILTER=%s", cql);
}
/**
* Create the full WMS url to fetch the tiles image
@@ -355,7 +383,7 @@ protected Optional<String> getMapServerUrl(String collectionId, FeatureRequest r
List<LinkModel> wmsLinks = model.getLinks()
.stream()
.filter(link -> link.getAiGroup() != null)
.filter(link -> link.getAiGroup().contains("Data Access > wms"))
.filter(link -> link.getAiGroup().contains(WMS_LINK_MARKER))
.toList();

if (wmsLinks.isEmpty()) {
@@ -30,6 +30,7 @@
import java.util.ArrayList;
import java.util.List;

import static au.org.aodn.ogcapi.server.core.service.wfs.WfsDefaultParam.WFS_LINK_MARKER;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
@@ -124,7 +125,7 @@ public void testGetDownloadableFieldsSuccess() {
LinkModel.builder()
.href("http://geoserver-123.aodn.org.au/geoserver/ows")
.title(request.getLayerName())
.aiGroup("Data Access > wfs")
.aiGroup(WFS_LINK_MARKER)
.build())
)
.build()
@@ -194,7 +195,7 @@ public void testGetDownloadableFieldsEmptyResponse() {
LinkModel.builder()
.href("http://geoserver-123.aodn.org.au/geoserver/ows")
.title(request.getLayerName())
.aiGroup("Data Access > wfs")
.aiGroup(WFS_LINK_MARKER)
.build())
)
.build()
@@ -232,7 +233,7 @@ public void testGetDownloadableFieldsWfsError() {
LinkModel.builder()
.href("http://geoserver-123.aodn.org.au/geoserver/ows")
.title(request.getLayerName())
.aiGroup("Data Access > wfs")
.aiGroup(WFS_LINK_MARKER)
.build())
)
.build()
@@ -267,7 +268,7 @@ public void testGetDownloadableFieldsNetworkError() {
LinkModel.builder()
.href("http://geoserver-123.aodn.org.au/geoserver/ows")
.title(request.getLayerName())
.aiGroup("Data Access > wfs")
.aiGroup(WFS_LINK_MARKER)
.build())
)
.build()