Skip to content

Commit

Permalink
Fixed: #4569 - Import SQL script in Schema
Browse files Browse the repository at this point in the history
  • Loading branch information
delchev committed Jan 16, 2025
1 parent b12cbb5 commit f1fa783
Show file tree
Hide file tree
Showing 8 changed files with 229 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ public ResponseEntity<?> importDataInTable(@Validated @PathVariable("datasource"
if (logger.isErrorEnabled()) {
logger.error(e.getMessage(), e);
}
throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Project upload failed: " + e.getMessage());
throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Data upload failed: " + e.getMessage());
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
/*
* Copyright (c) 2024 Eclipse Dirigible contributors
*
* All rights reserved. This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v2.0 which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v20.html
*
 * SPDX-FileCopyrightText: Eclipse Dirigible contributors
 *
 * SPDX-License-Identifier: EPL-2.0
*/
package org.eclipse.dirigible.components.data.export.endpoint;

import static java.text.MessageFormat.format;

import java.io.IOException;
import java.io.InputStream;

import org.eclipse.dirigible.components.base.endpoint.BaseEndpoint;
import org.eclipse.dirigible.components.data.export.service.DataImportService;
import org.eclipse.dirigible.components.data.management.service.DatabaseMetadataService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.server.ResponseStatusException;

/**
* Front facing REST service serving sql processing.
*/
@RestController
@RequestMapping(BaseEndpoint.PREFIX_ENDPOINT_DATA + "sql")
public class DataSQLEndpoint {

    /** The constant logger. */
    private static final Logger logger = LoggerFactory.getLogger(DataSQLEndpoint.class);

    /** The database metadata service. */
    private final DatabaseMetadataService databaseMetadataService;

    /** The data import service, which also performs the SQL script processing. */
    private final DataImportService dataImportService;

    /**
     * Instantiates a new data SQL endpoint.
     *
     * @param databaseMetadataService the database metadata service
     * @param dataImportService the data import service
     */
    public DataSQLEndpoint(DatabaseMetadataService databaseMetadataService, DataImportService dataImportService) {
        this.databaseMetadataService = databaseMetadataService;
        this.dataImportService = dataImportService;
    }

    /**
     * Imports an uploaded SQL script and executes it in the given schema.
     *
     * NOTE(review): the name "importDataInTable" looks copied from the data-import endpoint;
     * it is kept unchanged for compatibility, but "importSQLInSchema" would describe it better.
     *
     * @param datasource the datasource
     * @param schema the schema
     * @param file the uploaded SQL script file
     * @return 200 OK when the script was processed
     * @throws Exception when the script execution fails
     */
    @PostMapping(value = "/{datasource}/{schema}", consumes = "multipart/form-data", produces = "application/json")
    public ResponseEntity<?> importDataInTable(@Validated @PathVariable("datasource") String datasource,
            @Validated @PathVariable("schema") String schema, @Validated @RequestParam("file") MultipartFile file) throws Exception {
        try {
            return processSQL(datasource, schema, file);
        } catch (IOException e) {
            if (logger.isErrorEnabled()) {
                logger.error(e.getMessage(), e);
            }
            throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "SQL upload failed: " + e.getMessage());
        }
    }

    /**
     * Process the uploaded SQL script.
     *
     * @param datasource the datasource
     * @param schema the schema
     * @param file the uploaded SQL script file
     * @return the response entity (200 OK on success)
     * @throws Exception the exception
     */
    private ResponseEntity<?> processSQL(String datasource, String schema, MultipartFile file) throws Exception {

        if (!databaseMetadataService.existsDataSourceMetadata(datasource)) {
            String error = format("Datasource {0} does not exist.", datasource);
            throw new ResponseStatusException(HttpStatus.NOT_FOUND, error);
        }

        // try-with-resources: the original never closed the multipart input stream
        try (InputStream is = file.getInputStream()) {
            dataImportService.processSQL(datasource, schema, is);
        }
        return ResponseEntity.ok()
                             .build();
    }

}
Original file line number Diff line number Diff line change
Expand Up @@ -85,4 +85,19 @@ public void importData(String datasource, String schema, String table, InputStre
importData(datasource, schema, table, true, true, ",", "\"", null, false, is);
}

    /**
     * Processes (executes) an SQL script read from the given input stream against the given schema.
     *
     * @param datasource the name of the target datasource
     * @param schema the schema in which the script is executed
     * @param is the input stream with the SQL script content; closing it is left to the caller
     * @throws Exception if obtaining a connection or executing the script fails
     */
    public void processSQL(String datasource, String schema, InputStream is) throws Exception {
        DirigibleDataSource dataSource = datasourceManager.getDataSource(datasource);
        // connection is closed automatically; statement splitting/execution is delegated to the dialect
        try (Connection connection = dataSource.getConnection()) {
            ISqlDialect dialect = SqlDialectFactory.getDialect(dataSource);
            dialect.processSQL(connection, schema, is);
        }
    }

}
Original file line number Diff line number Diff line change
Expand Up @@ -343,7 +343,7 @@ database.controller('DatabaseController', function ($scope, $http, messageHub) {
}.bind(this)
};
}

// Export metadata
ctxmenu.exportMetadata = {
"separator_before": true,
Expand Down Expand Up @@ -414,8 +414,18 @@ database.controller('DatabaseController', function ($scope, $http, messageHub) {

// Schema related actions
if (node.original.kind === 'schema') {
ctxmenu.exportData = {
// Context-menu entry on schema nodes: triggers the "Import SQL" dialog flow
// by broadcasting the selected node's label on the message hub.
ctxmenu.importScript = {
    "separator_before": false,
    "label": "Import SQL",
    "action": function (data) {
        let tree = $.jstree.reference(data.reference);
        let node = tree.get_node(data.reference);
        // NOTE(review): despite the name, this holds the schema node's label, not an SQL command
        let sqlCommand = node.original.text;
        messageHub.postMessage('database.data.import.sql', sqlCommand);
    }.bind(this)
};
ctxmenu.exportData = {
"separator_before": true,
"label": "Export Data",
"action": function (data) {
let tree = $.jstree.reference(data.reference);
Expand Down Expand Up @@ -485,7 +495,7 @@ database.controller('DatabaseController', function ($scope, $http, messageHub) {
}.bind(this)
};
}

// Collection related actions
if (node.original.kind === 'table' && node.original.type === 'collection') {
ctxmenu.showContents = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,12 @@ importView.controller('ImportViewController', [
$scope.dropAreaTitle = 'Import data files';
$scope.dropAreaSubtitle = 'Drop file(s) here, or use the "+" button.';
$scope.dropAreaMore = `Files will be imported in "${params.table}"`;
} if (params.importType === 'sql') {
$scope.inputAccept = 'sql';
$scope.importType = params.importType;
$scope.dropAreaTitle = 'Import SQL files';
$scope.dropAreaSubtitle = 'Drop file(s) here, or use the "+" button.';
$scope.dropAreaMore = `Files will be imported in "${params.schema}"`;
} else {
$scope.dropAreaTitle = 'Import files from zip';
$scope.dropAreaMore = `Files will be extracted in "${params.uploadPath}"`;
Expand All @@ -85,7 +91,7 @@ importView.controller('ImportViewController', [
name: 'customFilter',
fn: function (item /*{File|FileLikeObject}*/, options) {
let type = item.type.slice(item.type.lastIndexOf('/') + 1);
if ($scope.importType !== 'file' && $scope.importType !== 'data')
if ($scope.importType !== 'file' && $scope.importType !== 'data' && $scope.importType !== 'sql')
if (type != 'zip' && type != 'x-zip' && type != 'x-zip-compressed') {
return false;
}
Expand All @@ -102,7 +108,7 @@ importView.controller('ImportViewController', [
'Dirigible-Editor': 'Editor'
};
item.url = new UriBuilder().path(transportApi.getFileImportUrl().split('/')).path($scope.selectedWorkspace.name).path($scope.uploadPath.split('/')).path(item.name).build();
} else if ($scope.inDialog && $scope.importType === 'data') {
} else if ($scope.inDialog && ($scope.importType === 'data' || $scope.importType === 'sql')) {
item.headers = {
'Dirigible-Editor': 'Editor'
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -436,7 +436,21 @@ resultView.controller('DatabaseResultController', ['$scope', '$http', 'messageHu
importType: 'data',
uploadPath: url,
workspace: "",
table: $scope.datasource + " -> " + artifact[0] + " -> " + artifact[1],
table: $scope.datasource + "." + artifact[0] + "." + artifact[1],
}
);
}, true);

// Handles the "Import SQL" context-menu action: opens the import dialog window
// targeting the /services/data/sql endpoint for the selected datasource/schema.
messageHub.onDidReceiveMessage("database.data.import.sql", function (command) {
    // command.data is the schema label sent by the tree's context menu;
    // splitting on '.' assumes the name contains no dots — TODO confirm
    let artifact = command.data.split('.');
    let url = "/services/data/sql/" + $scope.datasource + "/" + artifact[0];
    messageHub.showDialogWindow(
        "import",
        {
            importType: 'sql',
            uploadPath: url,
            workspace: "",
            // display string shown by the import view ("Files will be imported in ...")
            schema: $scope.datasource + "." + artifact[0],
        }
    );
}, true);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -275,4 +275,13 @@ public interface ISqlDialect<SELECT extends SelectBuilder, INSERT extends Insert
*/
void importData(Connection connection, String table, InputStream input) throws Exception;

    /**
     * Processes (executes) an SQL script read from the given input stream against the given schema.
     *
     * @param connection the connection
     * @param schema the schema
     * @param is the input stream with the SQL script content
     * @throws Exception if the script execution fails
     */
    public void processSQL(Connection connection, String schema, InputStream is) throws Exception;

}
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,11 @@
import org.eclipse.dirigible.database.sql.builders.sequence.LastValueIdentityBuilder;
import org.eclipse.dirigible.database.sql.builders.sequence.NextValueSequenceBuilder;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.sql.*;
import java.util.Arrays;
import java.util.Collections;
Expand Down Expand Up @@ -481,6 +484,15 @@ public int count(Connection connection, String table) throws SQLException {
return count(connection, null, table);
}

/**
* Count.
*
* @param connection the connection
* @param schema the schema
* @param table the table
* @return the int
* @throws SQLException the SQL exception
*/
public int count(Connection connection, String schema, String table) throws SQLException {
String sql = countQuery(schema, table);
PreparedStatement statement = connection.prepareStatement(sql);
Expand Down Expand Up @@ -517,6 +529,13 @@ public String countQuery(String table) {
return countQuery(null, table);
}

/**
* Count query.
*
* @param schema the schema
* @param table the table
* @return the string
*/
public String countQuery(String schema, String table) {
String normalizeTableName = normalizeTableName(table);
return new SelectBuilder(this).column("COUNT(*)")
Expand Down Expand Up @@ -575,4 +594,43 @@ public void importData(Connection connection, String table, InputStream input) t
throw new SQLFeatureNotSupportedException();
}

/**
* Process SQL.
*
* @param connection the connection
* @param schema the schema
* @param is the is
* @throws Exception the exception
*/
@Override
public void processSQL(Connection connection, String schema, InputStream is) throws Exception {
BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));
StringBuilder builder = new StringBuilder();
int ch;
while ((ch = reader.read()) != -1) {
if (ch == ';') {
executeUpdate(connection, builder.toString());
builder.setLength(0);
} else {
builder.append((char) ch);
}
}
if (builder.length() > 0) {
executeUpdate(connection, builder.toString());
}
reader.close();
}

/**
* Execute update.
*
* @param connection the connection
* @param sql the sql
* @throws SQLException the SQL exception
*/
private void executeUpdate(Connection connection, String sql) throws SQLException {
PreparedStatement statement = connection.prepareStatement(sql);
statement.executeUpdate();
}

}

0 comments on commit f1fa783

Please sign in to comment.