
Add a single endpoint that visualizes a new table. [Hivepad integration] (#6)

* Create an additional endpoint that visualizes the new table

* Unit tests

* Use proper schema depending on the engine
bkyryliuk authored and bogdan-dbx committed May 13, 2020
1 parent fefcf5c commit cac699f
Showing 5 changed files with 166 additions and 35 deletions.
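The new route added in superset/views/core.py (see the diff below) lets an external tool such as Hivepad open the Explore view for a table that has not been registered in Superset yet. A minimal usage sketch, assuming a locally running Superset instance, an already-authenticated requests session, and placeholder IDs and names:

import requests

# Illustrative only: the host, database id, and table name are placeholders, and
# the session is assumed to already carry a valid Superset login cookie.
session = requests.Session()
resp = session.get(
    "http://localhost:8088/superset/explore_new/1/table/main.ab_role/",
    params={"is_sqllab_view": "true"},  # optional: hide the temporary table from the table list
    allow_redirects=False,
)
# On success the endpoint registers the table (if it does not exist yet) and
# replies with a redirect to /superset/explore/table/<table_id>.
print(resp.status_code, resp.headers.get("Location"))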
2 changes: 1 addition & 1 deletion superset/connectors/base/models.py
@@ -72,7 +72,7 @@ class BaseDatasource(
# Used to do code highlighting when displaying the query in the UI
query_language: Optional[str] = None

name = None # can be a Column or a property pointing to one
name: Optional[str] = None # can be a Column or a property pointing to one

# ---------------------------------------------------------------

9 changes: 4 additions & 5 deletions superset/security/manager.py
@@ -268,15 +268,15 @@ def datasource_access(self, datasource: "BaseDatasource") -> bool:
"datasource_access", datasource.perm
)

def get_datasource_access_error_msg(self, datasource: "BaseDatasource") -> str:
def get_datasource_access_error_msg(self, datasource_name: Optional[str]) -> str:
"""
Return the error message for the denied Superset datasource.
:param datasource: The denied Superset datasource
:param datasource_name: The denied Superset datasource name
:returns: The error message
"""

return f"""This endpoint requires the datasource {datasource.name}, database or
return f"""This endpoint requires the datasource {datasource_name}, database or
`all_datasource_access` permission"""

def get_datasource_access_link(self, datasource: "BaseDatasource") -> Optional[str]:
@@ -350,7 +350,6 @@ def rejected_tables(
) -> Set["Table"]:
"""
Return the list of rejected SQL tables.
:param sql: The SQL statement
:param database: The SQL database
:param schema: The SQL database schema
@@ -829,7 +828,7 @@ def assert_datasource_permission(self, datasource: "BaseDatasource") -> None:

if not self.datasource_access(datasource):
raise SupersetSecurityException(
self.get_datasource_access_error_msg(datasource),
self.get_datasource_access_error_msg(datasource.name),
self.get_datasource_access_link(datasource),
)

119 changes: 90 additions & 29 deletions superset/views/core.py
@@ -92,7 +92,7 @@
from superset.utils.dates import now_as_float
from superset.utils.decorators import etag_cache, stats_timing
from superset.views.database.filters import DatabaseFilter
from superset.views.utils import get_dashboard_extra_filters
from superset.views.utils import get_dashboard_extra_filters, parse_table_full_name

from .base import (
api,
@@ -289,6 +289,48 @@ class AccessRequestsModelView(SupersetModelView, DeleteMixin):
}


def create_if_not_exists_table(
database_id, schema_name, table_name, template_params=None, is_sqllab_view=False
):
database_obj = db.session.query(Database).filter_by(id=database_id).one()
if not security_manager.can_access_datasource(
database_obj, Table(table_name, schema_name)
):
full_table_name = (
"{}.{}".format(schema_name, table_name) if schema_name else table_name
)
flash(
__(security_manager.get_datasource_access_error_msg(full_table_name)),
"danger",
)

SqlaTable = ConnectorRegistry.sources["table"]
table = (
db.session.query(SqlaTable)
.filter_by(database_id=database_id, schema=schema_name, table_name=table_name)
.one_or_none()
)
if not table:
# Create the table if it doesn't exist.
with db.session.no_autoflush:
table = SqlaTable(table_name=table_name, owners=[g.user])
table.database_id = database_id
table.database = (
db.session.query(models.Database).filter_by(id=database_id).one()
)
table.schema = schema_name
table.template_params = template_params
table.is_sqllab_view = is_sqllab_view
# needed for the table validation.
validate_sqlatable(table)

db.session.add(table)
table.fetch_metadata()
create_table_permissions(table)
db.session.commit()
return table.id
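For orientation, a minimal sketch of how this helper is exercised; both call sites (explore_new and sqllab_table_viz) appear later in this diff, and the literal values below are placeholders borrowed from the test added in tests/core_tests.py:

# Placeholder values; explore_new() and sqllab_table_viz() both delegate here.
table_id = create_if_not_exists_table(
    database_id=1,        # e.g. the examples database
    schema_name="main",
    table_name="ab_role",
    is_sqllab_view=True,  # hides the temporary table from the table list
)
# explore_new() then redirects to /superset/explore/table/<table_id>.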


@talisman(force_https=False)
@app.route("/health")
def health():
@@ -758,6 +800,31 @@ def import_dashboards(self):
return redirect("/dashboard/list/")
return self.render_template("superset/import_dashboards.html")

@event_logger.log_this
@has_access
@expose(
"/explore_new/<database_id>/<datasource_type>/<datasource_name>/",
methods=["GET", "POST"],
)
def explore_new(self, database_id=None, datasource_type=None, datasource_name=None):
"""Integration endpoint. Allows visualizing tables that were not pre-created in Superset.
:param database_id: database id
:param datasource_type: table or druid
:param datasource_name: full name of the datasource, should include schema name if applicable
:return: redirects to the exploration page
"""
# overloading is_sqllab_view to be able to hide the temporary tables from the table list.
is_sqllab_view = request.args.get("is_sqllab_view") == "true"
assert (
datasource_type == "table"
), f"Only table datasource_type is supported, not {datasource_type}."
schema_name, table_name = parse_table_full_name(datasource_name)
table_id = create_if_not_exists_table(
database_id, schema_name, table_name, is_sqllab_view=is_sqllab_view
)
return redirect(f"/superset/explore/{datasource_type}/{table_id}")

@event_logger.log_this
@has_access
@expose("/explore/<datasource_type>/<datasource_id>/", methods=["GET", "POST"])
@@ -816,7 +883,7 @@ def explore(self, datasource_type=None, datasource_id=None):
not security_manager.datasource_access(datasource)
):
flash(
__(security_manager.get_datasource_access_error_msg(datasource)),
__(security_manager.get_datasource_access_error_msg(datasource.name)),
"danger",
)
return redirect(
@@ -1823,7 +1890,9 @@ def dashboard(self, dashboard_id):
if datasource and not security_manager.datasource_access(datasource):
flash(
__(
security_manager.get_datasource_access_error_msg(datasource)
security_manager.get_datasource_access_error_msg(
datasource.name
)
),
"danger",
)
@@ -1844,6 +1913,9 @@
) and security_manager.can_access("can_save_dash", "Superset")
dash_save_perm = security_manager.can_access("can_save_dash", "Superset")
superset_can_explore = security_manager.can_access("can_explore", "Superset")
superset_can_explore_new = security_manager.can_access(
"can_explore_new", "Superset"
)
superset_can_csv = security_manager.can_access("can_csv", "Superset")
slice_can_edit = security_manager.can_access("can_edit", "SliceModelView")

@@ -1873,6 +1945,7 @@ def dashboard(**kwargs):
"dash_save_perm": dash_save_perm,
"dash_edit_perm": dash_edit_perm,
"superset_can_explore": superset_can_explore,
"superset_can_explore_new": superset_can_explore_new,
"superset_can_csv": superset_can_csv,
"slice_can_edit": slice_can_edit,
}
@@ -1984,34 +2057,22 @@ def sqllab_table_viz(self):
* templateParams - params for the Jinja templating syntax, optional
:return: Response
"""
SqlaTable = ConnectorRegistry.sources["table"]
data = json.loads(request.form.get("data"))
table_name = data.get("datasourceName")
database_id = data.get("dbId")
table = (
db.session.query(SqlaTable)
.filter_by(database_id=database_id, table_name=table_name)
.one_or_none()
)
if not table:
# Create table if doesn't exist.
with db.session.no_autoflush:
table = SqlaTable(table_name=table_name, owners=[g.user])
table.database_id = database_id
table.database = (
db.session.query(models.Database).filter_by(id=database_id).one()
)
table.schema = data.get("schema")
table.template_params = data.get("templateParams")
# needed for the table validation.
validate_sqlatable(table)

db.session.add(table)
table.fetch_metadata()
create_table_permissions(table)
db.session.commit()

return json_success(json.dumps({"table_id": table.id}))
table_name = data.get("datasourceName")
schema_name = data.get("schema")
# overloading is_sqllab_view to be able to hide the temporary tables from the table list.
is_sqllab_view = request.args.get("is_sqllab_view") == "true"
template_params = data.get("templateParams")

table_id = create_if_not_exists_table(
database_id,
schema_name,
table_name,
template_params=template_params,
is_sqllab_view=is_sqllab_view,
)
return json_success(json.dumps({"table_id": table_id}))
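For reference, a sketch of the form payload this handler now expects; the field names are taken from the code above, the values are placeholders, and the route decorator itself is outside this hunk:

import json

# Placeholder payload; the handler parses the JSON-encoded "data" form field.
payload = {
    "data": json.dumps(
        {
            "datasourceName": "ab_role",  # table name
            "dbId": 1,                    # target database id
            "schema": "main",             # optional schema name
            "templateParams": None,       # optional Jinja templating params
        }
    )
}
# POST this form (optionally with ?is_sqllab_view=true in the query string);
# the response is JSON of the form {"table_id": <id>}.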

@has_access
@expose("/sqllab_viz/", methods=["POST"])
13 changes: 13 additions & 0 deletions superset/views/utils.py
@@ -361,3 +361,16 @@ def is_slice_in_container(layout: Dict, container_id: str, slice_id: int) -> bool:
)

return False


def parse_table_full_name(full_table_name: str) -> Tuple[Optional[str], str]:
"""Parses a full table name into its (schema name, table name) components.
Note that the full name conforms to the [[cluster.]schema.]table construct.
"""
table_name_pieces = full_table_name.split(".")
if len(table_name_pieces) == 3:
return table_name_pieces[1], table_name_pieces[2]
elif len(table_name_pieces) == 2:
return table_name_pieces[0], table_name_pieces[1]
return None, table_name_pieces[0]
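A brief sketch of the expected behavior of this helper, following the [[cluster.]schema.]table convention described in its docstring:

# Expected (schema_name, table_name) results for each supported form:
assert parse_table_full_name("cluster.schema.table") == ("schema", "table")
assert parse_table_full_name("schema.table") == ("schema", "table")
assert parse_table_full_name("ab_role") == (None, "ab_role")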
58 changes: 58 additions & 0 deletions tests/core_tests.py
@@ -34,6 +34,7 @@
import pandas as pd
import sqlalchemy as sqla

from superset.utils.core import get_example_database
from tests.test_app import app
from superset import (
dataframe,
@@ -769,6 +770,63 @@ def test_slice_id_is_always_logged_correctly_on_ajax_request(self):
self.get_json_resp(slc_url, {"form_data": json.dumps(slc.form_data)})
self.assertEqual(1, qry.count())

def test_explore_new(self):
self.login("admin")
examples_db = get_example_database()
examples_dbid = examples_db.id
table_schema = None
if examples_db.backend == "mysql":
table_schema = "superset"
elif examples_db.backend == "sqlite":
table_schema = "main"
elif examples_db.backend == "postgres":
# no schema is created for this test in postgres
table_schema = ""

table_name = "ab_role"
full_table_name = f"{table_schema}.{table_name}" if table_schema else table_name
resp = self.get_resp(
f"/superset/explore_new/{examples_dbid}/table/{full_table_name}"
)
self.assertIn(full_table_name, resp)

# ensure owner is set correctly
table = (
db.session.query(SqlaTable)
.filter_by(
database_id=examples_dbid, table_name=table_name, schema=table_schema
)
.one()
)
self.assertEqual([owner.username for owner in table.owners], ["admin"])

# ensure that you can call it twice
resp = self.get_resp(
f"/superset/explore_new/{examples_dbid}/table/{full_table_name}"
)
self.assertIn(full_table_name, resp)

db.session.delete(table)
db.session.commit()

# test is_sqllab_view flag
sqllab_resp = self.get_resp(
f"/superset/explore_new/{examples_dbid}/table/{full_table_name}?is_sqllab_view=true"
)
self.assertIn(full_table_name, sqllab_resp)

# ensure is_sqllab_view is set
table = (
db.session.query(SqlaTable)
.filter_by(
database_id=examples_dbid, table_name=table_name, schema=table_schema
)
.one()
)
self.assertTrue(table.is_sqllab_view)
db.session.delete(table)
db.session.commit()

def test_import_csv(self):
self.login(username="admin")
table_name = "".join(random.choice(string.ascii_uppercase) for _ in range(5))
