GDC draft
m-mohr committed Aug 4, 2023
1 parent b6955ac commit 84a0469
Showing 3 changed files with 285 additions and 3 deletions.
6 changes: 4 additions & 2 deletions src/connection.js
@@ -8,7 +8,7 @@ const AuthProvider = require('./authprovider');
const BasicProvider = require('./basicprovider');
const OidcProvider = require('./oidcprovider');

-const Capabilities = require('./capabilities');
+const { GdcCapabilities, Migrate } = require('./gdc');
const FileTypes = require('./filetypes');
const UserFile = require('./userfile');
const Job = require('./job');
@@ -18,6 +18,7 @@ const Service = require('./service');
const Builder = require('./builder/builder');
const BuilderNode = require('./builder/node');


const CONFORMANCE_RELS = [
'conformance',
'http://www.opengis.net/def/rel/ogc/1.0/conformance'
@@ -120,7 +121,8 @@ class Connection {
}
}

-this.capabilitiesObject = new Capabilities(data);
+Migrate.connection = this;
+this.capabilitiesObject = new GdcCapabilities(data);
return this.capabilitiesObject;
}

276 changes: 276 additions & 0 deletions src/gdc.js
@@ -0,0 +1,276 @@
const Capabilities = require("./capabilities");
const Utils = require('@openeo/js-commons/src/utils');
const StacMigrate = require('@radiantearth/stac-migrate');

/**
* Capabilities of a back-end implementing the GeoDataCube (GDC) API draft.
*
* Extends the openEO capabilities with endpoints derived from OGC API conformance classes.
*/
class GdcCapabilities extends Capabilities {

/**
* Creates a new GdcCapabilities object from the capabilities (landing page) response.
*
* @param {object} data - The parsed response body.
*/
constructor(data) {
super(data);
Object.assign(this.featureMap, {
describeCoverage: 'get /collections/{collection_id}/coverage',
describeCoverageDomainset: 'get /collections/{collection_id}/coverage/domainset',
describeCoverageRangetype: 'get /collections/{collection_id}/coverage/rangetype',
describeCoverageRangeset: 'get /collections/{collection_id}/coverage/rangeset',
describeCoverageMetadata: 'get /collections/{collection_id}/coverage/metadata',
});
this.checkConformance();
}

/**
* Checks whether the back-end declares the given conformance class.
*
* @param {string} uri - The URI of the conformance class to check for.
* @returns {boolean} true if the conformance class is declared, false otherwise.
*/
hasConformance(uri) {
if(!Array.isArray(this.data.conformsTo)) {
return false;
}
return this.data.conformsTo.includes(uri);
}

/**
* Derives openEO-style endpoints from the OGC API conformance classes declared by the back-end.
*
* @protected
*/
checkConformance() {
if (!Array.isArray(this.data.endpoints)) {
this.data.endpoints = [];
}
if (this.hasConformance('http://www.opengis.net/spec/ogcapi-coverages-1/0.0/conf/geodata-coverage')) {
this.data.endpoints.push({
"path": "/collections",
"methods": ["GET"]
});
this.data.endpoints.push({
"path": "/collections/{collection_id}",
"methods": ["GET"]
});
this.data.endpoints.push({
"path": "/collections/{collection_id}/coverage",
"methods": ["GET"]
});
this.data.endpoints.push({
"path": "/collections/{collection_id}/coverage/domainset",
"methods": ["GET"]
});
this.data.endpoints.push({
"path": "/collections/{collection_id}/coverage/rangetype",
"methods": ["GET"]
});
this.data.endpoints.push({
"path": "/collections/{collection_id}/coverage/rangeset",
"methods": ["GET"]
});
this.data.endpoints.push({
"path": "/collections/{collection_id}/coverage/metadata",
"methods": ["GET"]
});
}
if (this.hasConformance('http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/core')) {
this.data.endpoints.push({
"path": "/processes",
"methods": ["GET"]
});
this.data.endpoints.push({
"path": "/jobs",
"methods": ["GET"]
});
this.data.endpoints.push({
"path": "/jobs/{job_id}",
"methods": ["GET", "DELETE"]
});
}
this.init();
}

/**
* Initializes the class.
*
* @protected
*/
init() {
if (Array.isArray(this.data.endpoints)) {
super.init();
}
}

/**
* Validates the capabilities.
*
* Throws an error in case of an issue, otherwise just passes.
*
* @protected
* @throws {Error}
*/
validate() {
if(!Utils.isObject(this.data)) {
throw new Error("No capabilities retrieved.");
}
}

/**
* Returns the openEO API version implemented by the back-end.
*
* @returns {string} openEO API version number.
*/
apiVersion() {
return this.data.api_version;
}

/**
* Returns the GDC API version implemented by the back-end.
*
* @returns {string} GDC API version number.
*/
gdcVersion() {
return this.data.gdc_version || "1.0.0-beta.1";
}

/**
* Checks whether a response was retrieved from the given endpoint.
*
* @protected
* @param {AxiosResponse} response - The response to check.
* @param {string} method - The HTTP method in lower case, e.g. get.
* @param {string} endpoint - The endpoint path, {} is the placeholder for a path parameter.
* @returns {boolean} true if the response matches the endpoint, false otherwise.
*/
isEndpoint(response, method, endpoint) {
if (response.config.method !== method) {
return false;
}
if (endpoint.includes('{}')) {
let pattern = '^' + endpoint.replace('{}', '[^/]+') + '$';
let regex = new RegExp(pattern);
return regex.test(response.config.url);
}
return endpoint === response.config.url;
}

/**
* Migrates a response, if required.
*
* @param {AxiosResponse} response
* @protected
* @returns {AxiosResponse}
*/
migrate(response) {
if (this.isEndpoint(response, 'get', '/collections')) {
response.data.collections = response.data.collections.map(collection => Migrate.collection(collection, response));
}
else if (this.isEndpoint(response, 'get', '/collections/{}')) {
response.data = Migrate.collection(response.data, response);
}
else if (this.isEndpoint(response, 'get', '/processes')) {
response.data.processes = response.data.processes.map(process => Migrate.process(process, response));
}
else if (this.isEndpoint(response, 'get', '/jobs')) {
response.data.jobs = response.data.jobs.map(job => Migrate.job(job, response));
}
else if (this.isEndpoint(response, 'get', '/jobs/{}')) {
response.data = Migrate.job(response.data, response);
}

response = Migrate.all(response);

return response;
}
}

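// Maps OGC API - Processes job statuses to their openEO equivalents.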
const JobStatusMap = {
accepted: 'created',
running: 'running',
successful: 'finished',
failed: 'error',
dismissed: 'canceled'
};

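/**
* Helpers that convert OGC API responses into the structures the openEO client expects.
*
* The connection property must be set before use (see src/connection.js).
*/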
const Migrate = {

connection: null,

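// Resolves relative links in a response against the connection's base URL.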
all(response) {
if (Array.isArray(response.data.links)) {
response.data.links = this.connection.makeLinksAbsolute(response.data.links, response);
}
return response;
},

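// Leaves STAC collections untouched, otherwise fills in the required STAC fields and resolves links.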
collection(collection, response) {
if (collection.stac_version) {
return collection;
}

// Make sure the required properties are present
collection = StacMigrate.collection(collection);
// Make links absolute
if (Array.isArray(collection.links)) {
collection.links = this.connection.makeLinksAbsolute(collection.links, response);
}

return collection;
},

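// Leaves openEO processes untouched, otherwise derives parameters and returns from the OGC inputs and outputs.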
process(process, response) {
if (process.parameters || process.returns) {
return process;
}

process.summary = process.title;

process.parameters = [];
for(let name in process.inputs) {
let input = process.inputs[name];
process.parameters.push({
name,
description: [input.title, input.description].filter(v => Boolean(v)).join("\n\n"),
schema: input.schema,
optional: typeof input.schema.default !== 'undefined'
});
}

if (Utils.size(process.outputs) === 1) {
let output = Object.values(process.outputs)[0];
process.returns = {
description: [output.title, output.description].filter(v => Boolean(v)).join("\n\n"),
schema: output.schema
};
}
else {
process.returns = {
description: 'see process description',
schema: []
};
for(let name in process.outputs) {
let output = process.outputs[name];
let schema = Object.assign({}, output.schema, {title: output.title, description: output.description});
process.returns.schema.push(schema);
}
}

// Make links absolute
if (Array.isArray(process.links)) {
process.links = this.connection.makeLinksAbsolute(process.links, response);
}

return process;
},

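// Leaves openEO jobs untouched, otherwise maps the OGC API - Processes job onto the openEO batch job structure.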
job(job, response) {
if (job.type !== 'process') {
return job;
}

job.id = job.jobID;
job.process = {
process_graph: {
[job.processID]: {
process_id: job.processID,
arguments: {}
}
}
};
job.status = JobStatusMap[job.status];
job.created = job.started;
job.updated = job.finished;
job.description = job.message;

if (Array.isArray(job.links)) {
job.links = this.connection.makeLinksAbsolute(job.links, response);
}

return job;
},

};

module.exports = {
GdcCapabilities,
Migrate
};
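
For orientation, a minimal usage sketch of the class above. The landing page object is invented for this example, and it assumes the feature detection (hasFeature) inherited from Capabilities behaves as in the released client:

const { GdcCapabilities } = require('./gdc');

// Invented, trimmed landing page as a GDC back-end might return it.
const landingPage = {
  api_version: '1.2.0',
  conformsTo: [
    'http://www.opengis.net/spec/ogcapi-coverages-1/0.0/conf/geodata-coverage'
  ],
  links: []
};

const capabilities = new GdcCapabilities(landingPage);
capabilities.gdcVersion(); // '1.0.0-beta.1' (default, no gdc_version reported)
capabilities.hasFeature('describeCoverage'); // true, endpoint derived from the Coverages conformance class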
6 changes: 5 additions & 1 deletion src/openeo.js
@@ -27,6 +27,7 @@ const Formula = require('./builder/formula');

const MIN_API_VERSION = '1.0.0-rc.2';
const MAX_API_VERSION = '1.x.x';
+const GDC_VERSION = '1.0.0-beta.1';

/**
* Main class to start with openEO. Allows to connect to a server.
@@ -94,9 +95,12 @@ class OpenEO {

// Check whether back-end is accessible and supports a compatible version.
let capabilities = await connection.init();
-if (Versions.compare(capabilities.apiVersion(), MIN_API_VERSION, "<") || Versions.compare(capabilities.apiVersion(), MAX_API_VERSION, ">")) {
+if (capabilities.apiVersion() && (Versions.compare(capabilities.apiVersion(), MIN_API_VERSION, "<") || Versions.compare(capabilities.apiVersion(), MAX_API_VERSION, ">"))) {
throw new Error("Client only supports the API versions between " + MIN_API_VERSION + " and " + MAX_API_VERSION);
}
+if (!Versions.compare(capabilities.gdcVersion(), GDC_VERSION, "=")) {
+	throw new Error("Client only supports the GDC version " + GDC_VERSION);
+}

return connection;
}
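
As a rough usage sketch of the adjusted connect flow (the URL is a placeholder, and it assumes OpenEO.connect and connection.capabilities() keep their existing signatures and exports):

const { OpenEO } = require('./openeo');

async function connectToGdcBackend() {
  try {
    const connection = await OpenEO.connect('https://example.com/gdc'); // placeholder URL
    console.log('GDC version:', connection.capabilities().gdcVersion());
    return connection;
  }
  catch (error) {
    // Thrown for openEO versions outside MIN/MAX_API_VERSION or a GDC version other than 1.0.0-beta.1.
    console.error(error.message);
    return null;
  }
}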
