diff --git a/datahub-frontend/app/auth/AuthUtils.java b/datahub-frontend/app/auth/AuthUtils.java index 9329012966a4ae..a32419ace115ed 100644 --- a/datahub-frontend/app/auth/AuthUtils.java +++ b/datahub-frontend/app/auth/AuthUtils.java @@ -47,6 +47,11 @@ public class AuthUtils { public static final String PASSWORD = "password"; public static final String ACTOR = "actor"; public static final String ACCESS_TOKEN = "token"; + public static final String FULL_NAME = "fullName"; + public static final String EMAIL = "email"; + public static final String TITLE = "title"; + public static final String INVITE_TOKEN = "inviteToken"; + public static final String RESET_TOKEN = "resetToken"; /** * Determines whether the inbound request should be forward to downstream Metadata Service. Today, this simply diff --git a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java new file mode 100644 index 00000000000000..db17313d67f9a4 --- /dev/null +++ b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java @@ -0,0 +1,23 @@ +package auth; + +/** + * Currently, this config enables or disables native user authentication. + */ +public class NativeAuthenticationConfigs { + + public static final String NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH = "auth.native.enabled"; + + private Boolean _isEnabled = true; + + public NativeAuthenticationConfigs(final com.typesafe.config.Config configs) { + if (configs.hasPath(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH) + && Boolean.FALSE.equals( + Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString()))) { + _isEnabled = false; + } + } + + public boolean isNativeAuthenticationEnabled() { + return _isEnabled; + } +} diff --git a/datahub-frontend/app/client/AuthServiceClient.java b/datahub-frontend/app/client/AuthServiceClient.java index ba4303129e3b23..e32064f6084f6b 100644 --- a/datahub-frontend/app/client/AuthServiceClient.java +++ b/datahub-frontend/app/client/AuthServiceClient.java @@ -23,19 +23,29 @@ public class AuthServiceClient { private static final String GENERATE_SESSION_TOKEN_ENDPOINT = "auth/generateSessionTokenForUser"; + private static final String SIGN_UP_ENDPOINT = "auth/signUp"; + private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/resetNativeUserCredentials"; + private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/verifyNativeUserCredentials"; private static final String ACCESS_TOKEN_FIELD = "accessToken"; private static final String USER_ID_FIELD = "userId"; + private static final String USER_URN_FIELD = "userUrn"; + private static final String FULL_NAME_FIELD = "fullName"; + private static final String EMAIL_FIELD = "email"; + private static final String TITLE_FIELD = "title"; + private static final String PASSWORD_FIELD = "password"; + private static final String INVITE_TOKEN_FIELD = "inviteToken"; + private static final String RESET_TOKEN_FIELD = "resetToken"; + private static final String IS_NATIVE_USER_CREATED_FIELD = "isNativeUserCreated"; + private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD = "areNativeUserCredentialsReset"; + private static final String DOES_PASSWORD_MATCH_FIELD = "doesPasswordMatch"; private final String metadataServiceHost; private final Integer metadataServicePort; private final Boolean metadataServiceUseSsl; private final Authentication systemAuthentication; - public AuthServiceClient( - @Nonnull final String metadataServiceHost, - @Nonnull final Integer 
metadataServicePort, - @Nonnull final Boolean useSsl, - @Nonnull final Authentication systemAuthentication) { + public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull final Integer metadataServicePort, + @Nonnull final Boolean useSsl, @Nonnull final Authentication systemAuthentication) { this.metadataServiceHost = Objects.requireNonNull(metadataServiceHost); this.metadataServicePort = Objects.requireNonNull(metadataServicePort); this.metadataServiceUseSsl = Objects.requireNonNull(useSsl); @@ -88,6 +98,154 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { } } + /** + * Call the Auth Service to create a native DataHub user. + */ + @Nonnull + public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullName, @Nonnull final String email, + @Nonnull final String title, @Nonnull final String password, @Nonnull final String inviteToken) { + Objects.requireNonNull(userUrn, "userUrn must not be null"); + Objects.requireNonNull(fullName, "fullName must not be null"); + Objects.requireNonNull(email, "email must not be null"); + Objects.requireNonNull(title, "title must not be null"); + Objects.requireNonNull(password, "password must not be null"); + Objects.requireNonNull(inviteToken, "inviteToken must not be null"); + CloseableHttpClient httpClient = HttpClients.createDefault(); + + try { + + final String protocol = this.metadataServiceUseSsl ? "https" : "http"; + final HttpPost request = + new HttpPost(String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, + SIGN_UP_ENDPOINT)); + + // Build JSON request to sign up a native user. + String json = + String.format("{ \"%s\":\"%s\", \"%s\":\"%s\", \"%s\":\"%s\", \"%s\":\"%s\", \"%s\":\"%s\", \"%s\":\"%s\" }", + USER_URN_FIELD, userUrn, FULL_NAME_FIELD, fullName, EMAIL_FIELD, email, TITLE_FIELD, title, + PASSWORD_FIELD, password, INVITE_TOKEN_FIELD, inviteToken); + request.setEntity(new StringEntity(json)); + + // Add authorization header with DataHub frontend system id and secret. + request.addHeader(Http.HeaderNames.AUTHORIZATION, this.systemAuthentication.getCredentials()); + + CloseableHttpResponse response = httpClient.execute(request); + final HttpEntity entity = response.getEntity(); + if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK && entity != null) { + // Request succeeded; check whether the native user was created + final String jsonStr = EntityUtils.toString(entity); + return getIsNativeUserCreatedFromJson(jsonStr); + } else { + throw new RuntimeException( + String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), + response.getEntity().toString())); + } + } catch (Exception e) { + throw new RuntimeException("Failed to create user", e); + } finally { + try { + httpClient.close(); + } catch (Exception e) { + log.warn("Failed to close http client", e); + } + } + } + + /** + * Call the Auth Service to reset credentials for a native DataHub user. + */ + @Nonnull + public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password, + @Nonnull final String resetToken) { + Objects.requireNonNull(userUrn, "userUrn must not be null"); + Objects.requireNonNull(password, "password must not be null"); + Objects.requireNonNull(resetToken, "reset token must not be null"); + CloseableHttpClient httpClient = HttpClients.createDefault(); + + try { + + final String protocol = this.metadataServiceUseSsl ? 
"https" : "http"; + final HttpPost request = new HttpPost( + String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, + RESET_NATIVE_USER_CREDENTIALS_ENDPOINT)); + + // Build JSON request to reset credentials for a native user. + String json = + String.format("{ \"%s\":\"%s\", \"%s\":\"%s\", \"%s\":\"%s\" }", USER_URN_FIELD, userUrn, + PASSWORD_FIELD, password, RESET_TOKEN_FIELD, resetToken); + request.setEntity(new StringEntity(json)); + + // Add authorization header with DataHub frontend system id and secret. + request.addHeader(Http.HeaderNames.AUTHORIZATION, this.systemAuthentication.getCredentials()); + + CloseableHttpResponse response = httpClient.execute(request); + final HttpEntity entity = response.getEntity(); + if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK && entity != null) { + // Request succeeded; check whether the credentials were reset + final String jsonStr = EntityUtils.toString(entity); + return getAreNativeUserCredentialsResetFromJson(jsonStr); + } else { + throw new RuntimeException( + String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), + response.getEntity().toString())); + } + } catch (Exception e) { + throw new RuntimeException("Failed to reset credentials for user", e); + } finally { + try { + httpClient.close(); + } catch (Exception e) { + log.warn("Failed to close http client", e); + } + } + } + + /** + * Call the Auth Service to verify the credentials for a native DataHub user. + */ + @Nonnull + public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password) { + Objects.requireNonNull(userUrn, "userUrn must not be null"); + Objects.requireNonNull(password, "password must not be null"); + CloseableHttpClient httpClient = HttpClients.createDefault(); + + try { + + final String protocol = this.metadataServiceUseSsl ? "https" : "http"; + final HttpPost request = new HttpPost( + String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, + VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT)); + + // Build JSON request to verify credentials for a native user. + String json = + String.format("{ \"%s\":\"%s\", \"%s\":\"%s\" }", USER_URN_FIELD, userUrn, PASSWORD_FIELD, password); + request.setEntity(new StringEntity(json)); + + // Add authorization header with DataHub frontend system id and secret. 
+ request.addHeader(Http.HeaderNames.AUTHORIZATION, this.systemAuthentication.getCredentials()); + + CloseableHttpResponse response = httpClient.execute(request); + final HttpEntity entity = response.getEntity(); + if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK && entity != null) { + // Request succeeded; check whether the password matched + final String jsonStr = EntityUtils.toString(entity); + return getDoesPasswordMatchFromJson(jsonStr); + } else { + throw new RuntimeException( + String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), + response.getEntity().toString())); + } + } catch (Exception e) { + throw new RuntimeException("Failed to verify credentials for user", e); + } finally { + try { + httpClient.close(); + } catch (Exception e) { + log.warn("Failed to close http client", e); + } + } + } + private String getAccessTokenFromJson(final String jsonStr) { ObjectMapper mapper = new ObjectMapper(); try { @@ -97,4 +255,31 @@ private String getAccessTokenFromJson(final String jsonStr) { throw new IllegalArgumentException("Failed to parse JSON received from the MetadataService!"); } } + + private boolean getIsNativeUserCreatedFromJson(final String jsonStr) { + ObjectMapper mapper = new ObjectMapper(); + try { + return mapper.readTree(jsonStr).get(IS_NATIVE_USER_CREATED_FIELD).asBoolean(); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to parse JSON received from the MetadataService!"); + } + } + + private boolean getAreNativeUserCredentialsResetFromJson(final String jsonStr) { + ObjectMapper mapper = new ObjectMapper(); + try { + return mapper.readTree(jsonStr).get(ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD).asBoolean(); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to parse JSON received from the MetadataService!"); + } + } + + private boolean getDoesPasswordMatchFromJson(final String jsonStr) { + ObjectMapper mapper = new ObjectMapper(); + try { + return mapper.readTree(jsonStr).get(DOES_PASSWORD_MATCH_FIELD).asBoolean(); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to parse JSON received from the MetadataService!"); + } + } } diff --git a/datahub-frontend/app/controllers/AuthenticationController.java b/datahub-frontend/app/controllers/AuthenticationController.java index 048c8e6ac3cab8..c43d806f593524 100644 --- a/datahub-frontend/app/controllers/AuthenticationController.java +++ b/datahub-frontend/app/controllers/AuthenticationController.java @@ -23,12 +23,12 @@ import play.mvc.Result; import auth.AuthUtils; import auth.JAASConfigs; +import auth.NativeAuthenticationConfigs; import auth.sso.SsoManager; import security.AuthenticationManager; import javax.annotation.Nonnull; import javax.inject.Inject; -import javax.naming.NamingException; import java.time.Duration; import java.time.temporal.ChronoUnit; @@ -45,6 +45,7 @@ public class AuthenticationController extends Controller { private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName()); private final Config _configs; private final JAASConfigs _jaasConfigs; + private final NativeAuthenticationConfigs _nativeAuthenticationConfigs; @Inject private org.pac4j.core.config.Config _ssoConfig; @Inject @@ -62,6 +63,7 @@ public class AuthenticationController extends Controller { public AuthenticationController(@Nonnull Config configs) { _configs = configs; _jaasConfigs = new JAASConfigs(configs); + _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs); } /** @@ -87,9 +89,10 @@ 
public Result authenticate() { return redirectToIdentityProvider(); } - // 2. If JAAS auth is enabled, fallback to it - if (_jaasConfigs.isJAASEnabled()) { - return redirect(LOGIN_ROUTE + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath))); + // 2. If either JAAS auth or Native auth is enabled, fallback to it + if (_jaasConfigs.isJAASEnabled() || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) { + return redirect( + LOGIN_ROUTE + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath))); } // 3. If no auth enabled, fallback to using default user account & redirect. @@ -109,9 +112,15 @@ public Result authenticate() { */ @Nonnull public Result logIn() { - if (!_jaasConfigs.isJAASEnabled()) { + boolean jaasEnabled = _jaasConfigs.isJAASEnabled(); + _logger.debug(String.format("Jaas authentication enabled: %b", jaasEnabled)); + boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + boolean noAuthEnabled = !jaasEnabled && !nativeAuthenticationEnabled; + if (noAuthEnabled) { + String message = "Neither JAAS nor native authentication is enabled on the server."; final ObjectNode error = Json.newObject(); - error.put("message", "JAAS authentication is not enabled on the server."); + error.put("message", message); return badRequest(error); } @@ -120,23 +129,19 @@ public Result logIn() { final String password = json.findPath(PASSWORD).textValue(); if (StringUtils.isBlank(username)) { - JsonNode invalidCredsJson = Json.newObject() - .put("message", "User name must not be empty."); + JsonNode invalidCredsJson = Json.newObject().put("message", "User name must not be empty."); return badRequest(invalidCredsJson); } ctx().session().clear(); - try { - AuthenticationManager.authenticateUser(username, password); - } catch (NamingException e) { - _logger.error("Authentication error", e); - JsonNode invalidCredsJson = Json.newObject() - .put("message", "Invalid Credentials"); + JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials"); + boolean loginSucceeded = tryLogin(username, password); + + if (!loginSucceeded) { return badRequest(invalidCredsJson); } - final Urn actorUrn = new CorpuserUrn(username); final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId()); ctx().session().put(ACTOR, actorUrn.toString()); @@ -147,6 +152,119 @@ public Result logIn() { .build()); } + /** + * Sign up a native user based on a name, email, title, and password. The invite token must match the global invite + * token stored for the DataHub instance. 
+ * + */ + @Nonnull + public Result signUp() { + boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + if (!nativeAuthenticationEnabled) { + String message = "Native authentication is not enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return badRequest(error); + } + + final JsonNode json = request().body().asJson(); + final String fullName = json.findPath(FULL_NAME).textValue(); + final String email = json.findPath(EMAIL).textValue(); + final String title = json.findPath(TITLE).textValue(); + final String password = json.findPath(PASSWORD).textValue(); + final String inviteToken = json.findPath(INVITE_TOKEN).textValue(); + + if (StringUtils.isBlank(fullName)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty."); + return badRequest(invalidCredsJson); + } + + if (StringUtils.isBlank(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return badRequest(invalidCredsJson); + } + + if (StringUtils.isBlank(password)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); + return badRequest(invalidCredsJson); + } + + if (StringUtils.isBlank(title)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty."); + return badRequest(invalidCredsJson); + } + + if (StringUtils.isBlank(inviteToken)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Invite token must not be empty."); + return badRequest(invalidCredsJson); + } + + ctx().session().clear(); + + final Urn userUrn = new CorpuserUrn(email); + final String userUrnString = userUrn.toString(); + boolean isNativeUserCreated = _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken); + final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); + ctx().session().put(ACTOR, userUrnString); + ctx().session().put(ACCESS_TOKEN, accessToken); + return ok().withCookies(Http.Cookie.builder(ACTOR, userUrnString) + .withHttpOnly(false) + .withMaxAge(Duration.of(30, ChronoUnit.DAYS)) + .build()); + } + + /** + * Reset the credentials of a native user based on an email, password, and reset token. 
+ * + */ + @Nonnull + public Result resetNativeUserCredentials() { + boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + if (!nativeAuthenticationEnabled) { + String message = "Native authentication is not enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return badRequest(error); + } + + final JsonNode json = request().body().asJson(); + final String email = json.findPath(EMAIL).textValue(); + final String password = json.findPath(PASSWORD).textValue(); + final String resetToken = json.findPath(RESET_TOKEN).textValue(); + + if (StringUtils.isBlank(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return badRequest(invalidCredsJson); + } + + if (StringUtils.isBlank(password)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); + return badRequest(invalidCredsJson); + } + + if (StringUtils.isBlank(resetToken)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty."); + return badRequest(invalidCredsJson); + } + + ctx().session().clear(); + + final Urn userUrn = new CorpuserUrn(email); + final String userUrnString = userUrn.toString(); + boolean areNativeUserCredentialsReset = + _authClient.resetNativeUserCredentials(userUrnString, password, resetToken); + _logger.debug(String.format("Are native user credentials reset: %b", areNativeUserCredentialsReset)); + final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); + ctx().session().put(ACTOR, userUrnString); + ctx().session().put(ACCESS_TOKEN, accessToken); + return ok().withCookies(Http.Cookie.builder(ACTOR, userUrnString) + .withHttpOnly(false) + .withMaxAge(Duration.of(30, ChronoUnit.DAYS)) + .build()); + } + private Result redirectToIdentityProvider() { final PlayWebContext playWebContext = new PlayWebContext(ctx(), _playSessionStore); final Client client = _ssoManager.getSsoProvider().client(); @@ -168,4 +286,30 @@ private String encodeRedirectUri(final String redirectUri) { throw new RuntimeException(String.format("Failed to encode redirect URI %s", redirectUri), e); } } + + private boolean tryLogin(String username, String password) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials"); + boolean loginSucceeded = false; + + // First try jaas login, if enabled + if (_jaasConfigs.isJAASEnabled()) { + try { + _logger.debug("Attempting jaas authentication"); + AuthenticationManager.authenticateJaasUser(username, password); + loginSucceeded = true; + _logger.debug("Jaas authentication successful"); + } catch (Exception e) { + _logger.debug("Jaas authentication error", e); + } + } + + // If jaas login fails or is disabled, try native auth login + if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) { + final Urn userUrn = new CorpuserUrn(username); + final String userUrnString = userUrn.toString(); + loginSucceeded = loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password); + } + + return loginSucceeded; + } } \ No newline at end of file diff --git a/datahub-frontend/app/security/AuthenticationManager.java b/datahub-frontend/app/security/AuthenticationManager.java index 8103d4a8c2d151..3845438acdbd5c 100644 --- a/datahub-frontend/app/security/AuthenticationManager.java +++ 
b/datahub-frontend/app/security/AuthenticationManager.java @@ -23,7 +23,7 @@ private AuthenticationManager() { } - public static void authenticateUser(@Nonnull String userName, @Nonnull String password) throws NamingException { + public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password) throws NamingException { Preconditions.checkArgument(!StringUtils.isAnyEmpty(userName), "Username cannot be empty"); try { JAASLoginService jaasLoginService = new JAASLoginService("WHZ-Authentication"); diff --git a/datahub-frontend/conf/application.conf b/datahub-frontend/conf/application.conf index 4dbff51ce29dae..1616f24e2548ee 100644 --- a/datahub-frontend/conf/application.conf +++ b/datahub-frontend/conf/application.conf @@ -152,11 +152,13 @@ auth.oidc.readTimeout = ${?AUTH_OIDC_READ_TIMEOUT} # any username / password combination as valid credentials. To disable this entry point altogether, specify the following config: # auth.jaas.enabled = ${?AUTH_JAAS_ENABLED} +auth.native.enabled = ${?AUTH_NATIVE_ENABLED} # -# To disable all authentication to the app, and proxy all users through a master "datahub" account, make sure that both -# jaas and oidc auth are disabled: +# To disable all authentication to the app, and proxy all users through a master "datahub" account, make sure that +# jaas, native, and oidc auth are all disabled: # # auth.jaas.enabled = false +# auth.native.enabled = false # auth.oidc.enabled = false # (or simply omit oidc configurations) # Login session expiration time diff --git a/datahub-frontend/conf/routes b/datahub-frontend/conf/routes index 32af2d54f4a267..77d8aef9c4742e 100644 --- a/datahub-frontend/conf/routes +++ b/datahub-frontend/conf/routes @@ -14,6 +14,8 @@ GET /config co # Authentication in React GET /authenticate controllers.AuthenticationController.authenticate() POST /logIn controllers.AuthenticationController.logIn() +POST /signUp controllers.AuthenticationController.signUp() +POST /resetNativeUserCredentials controllers.AuthenticationController.resetNativeUserCredentials() GET /callback/:protocol controllers.SsoCallbackController.handleCallback(protocol: String) POST /callback/:protocol controllers.SsoCallbackController.handleCallback(protocol: String) GET /logOut controllers.CentralLogoutController.executeLogout() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 07439350f6323d..4fd52cd301c283 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -2,6 +2,7 @@ import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authentication.token.StatefulTokenService; +import com.datahub.authentication.user.NativeUserService; import com.datahub.authorization.AuthorizationConfiguration; import com.google.common.collect.ImmutableList; import com.linkedin.common.VersionedUrn; @@ -86,12 +87,14 @@ import com.linkedin.datahub.graphql.resolvers.domain.SetDomainResolver; import com.linkedin.datahub.graphql.resolvers.domain.UnsetDomainResolver; import com.linkedin.datahub.graphql.resolvers.entity.EntityExistsResolver; +import com.linkedin.datahub.graphql.resolvers.glossary.AddRelatedTermsResolver; import com.linkedin.datahub.graphql.resolvers.glossary.CreateGlossaryNodeResolver; import 
com.linkedin.datahub.graphql.resolvers.glossary.CreateGlossaryTermResolver; import com.linkedin.datahub.graphql.resolvers.glossary.DeleteGlossaryEntityResolver; import com.linkedin.datahub.graphql.resolvers.glossary.GetRootGlossaryNodesResolver; import com.linkedin.datahub.graphql.resolvers.glossary.GetRootGlossaryTermsResolver; import com.linkedin.datahub.graphql.resolvers.glossary.ParentNodesResolver; +import com.linkedin.datahub.graphql.resolvers.glossary.RemoveRelatedTermsResolver; import com.linkedin.datahub.graphql.resolvers.group.AddGroupMembersResolver; import com.linkedin.datahub.graphql.resolvers.group.CreateGroupResolver; import com.linkedin.datahub.graphql.resolvers.group.EntityCountsResolver; @@ -161,6 +164,9 @@ import com.linkedin.datahub.graphql.resolvers.type.PlatformSchemaUnionTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.ResultsTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.TimeSeriesAspectInterfaceTypeResolver; +import com.linkedin.datahub.graphql.resolvers.user.CreateNativeUserInviteTokenResolver; +import com.linkedin.datahub.graphql.resolvers.user.CreateNativeUserResetTokenResolver; +import com.linkedin.datahub.graphql.resolvers.user.GetNativeUserInviteTokenResolver; import com.linkedin.datahub.graphql.resolvers.user.ListUsersResolver; import com.linkedin.datahub.graphql.resolvers.user.RemoveUserResolver; import com.linkedin.datahub.graphql.resolvers.user.UpdateUserStatusResolver; @@ -261,6 +267,7 @@ public class GmsGraphQLEngine { private final boolean supportsImpactAnalysis; private final TimeseriesAspectService timeseriesAspectService; private final TimelineService timelineService; + private final NativeUserService nativeUserService; private final IngestionConfiguration ingestionConfiguration; private final AuthenticationConfiguration authenticationConfiguration; @@ -333,6 +340,7 @@ public GmsGraphQLEngine( final TimeseriesAspectService timeseriesAspectService, final EntityRegistry entityRegistry, final SecretService secretService, + final NativeUserService nativeUserService, final IngestionConfiguration ingestionConfiguration, final AuthenticationConfiguration authenticationConfiguration, final AuthorizationConfiguration authorizationConfiguration, @@ -359,6 +367,7 @@ public GmsGraphQLEngine( this.supportsImpactAnalysis = supportsImpactAnalysis; this.timeseriesAspectService = timeseriesAspectService; this.timelineService = timelineService; + this.nativeUserService = nativeUserService; this.ingestionConfiguration = Objects.requireNonNull(ingestionConfiguration); this.authenticationConfiguration = Objects.requireNonNull(authenticationConfiguration); @@ -623,6 +632,7 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("getRootGlossaryTerms", new GetRootGlossaryTermsResolver(this.entityClient)) .dataFetcher("getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient)) .dataFetcher("entityExists", new EntityExistsResolver(this.entityService)) + .dataFetcher("getNativeUserInviteToken", new GetNativeUserInviteTokenResolver(this.nativeUserService)) ); } @@ -695,6 +705,10 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("updateParentNode", new UpdateParentNodeResolver(entityService)) .dataFetcher("deleteGlossaryEntity", new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) .dataFetcher("updateName", new UpdateNameResolver(entityService)) + .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService)) 
+ .dataFetcher("removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService)) + .dataFetcher("createNativeUserInviteToken", new CreateNativeUserInviteTokenResolver(this.nativeUserService)) + .dataFetcher("createNativeUserResetToken", new CreateNativeUserResetTokenResolver(this.nativeUserService)) ); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java index 173d96f804682e..0ff396374e1ede 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java @@ -33,6 +33,10 @@ public static boolean canManageGlossaries(@Nonnull QueryContext context) { return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } + public static boolean canManageUserCredentials(@Nonnull QueryContext context) { + return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); + } + public static boolean isAuthorized( @Nonnull QueryContext context, @Nonnull Optional resourceSpec, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java index 68f25fd14df558..ecd6a422903b88 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java @@ -19,6 +19,7 @@ import java.util.Collections; import java.util.Optional; import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; import static com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; import static com.linkedin.metadata.Constants.*; @@ -63,6 +64,7 @@ public CompletableFuture get(DataFetchingEnvironment environm platformPrivileges.setManageTokens(canManageTokens(context)); platformPrivileges.setManageTests(canManageTests(context)); platformPrivileges.setManageGlossaries(canManageGlossaries(context)); + platformPrivileges.setManageUserCredentials(canManageUserCredentials(context)); // Construct and return authenticated user object. final AuthenticatedUser authUser = new AuthenticatedUser(); @@ -131,6 +133,14 @@ private boolean canManageGlossaries(final QueryContext context) { return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } + /** + * Returns true if the authenticated user has privileges to manage user credentials + */ + private boolean canManageUserCredentials(@Nonnull QueryContext context) { + return isAuthorized(context.getAuthorizer(), context.getActorUrn(), + PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); + } + /** * Returns true if the provided actor is authorized for a particular privilege, false otherwise. 
*/ diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java new file mode 100644 index 00000000000000..c440f1a7721dcc --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java @@ -0,0 +1,122 @@ +package com.linkedin.datahub.graphql.resolvers.glossary; + +import com.linkedin.common.GlossaryTermUrnArray; +import com.linkedin.common.urn.GlossaryTermUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.RelatedTermsInput; +import com.linkedin.datahub.graphql.generated.TermRelationshipType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.glossary.GlossaryRelatedTerms; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + +@Slf4j +@RequiredArgsConstructor +public class AddRelatedTermsResolver implements DataFetcher> { + + private final EntityService _entityService; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + + final QueryContext context = environment.getContext(); + final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + + return CompletableFuture.supplyAsync(() -> { + if (AuthorizationUtils.canManageGlossaries(context)) { + try { + final TermRelationshipType relationshipType = input.getRelationshipType(); + final Urn urn = Urn.createFromString(input.getUrn()); + final List termUrns = input.getTermUrns().stream() + .map(UrnUtils::getUrn) + .collect(Collectors.toList()); + validateRelatedTermsInput(urn, termUrns); + Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); + + GlossaryRelatedTerms glossaryRelatedTerms = (GlossaryRelatedTerms) getAspectFromEntity( + urn.toString(), + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + _entityService, + null + ); + if (glossaryRelatedTerms == null) { + glossaryRelatedTerms = new GlossaryRelatedTerms(); + } + + if (relationshipType == TermRelationshipType.isA) { + if (!glossaryRelatedTerms.hasIsRelatedTerms()) { + glossaryRelatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray()); + } + final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getIsRelatedTerms(); + + return updateRelatedTerms(termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); + } else { + if (!glossaryRelatedTerms.hasHasRelatedTerms()) { + glossaryRelatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray()); + } + final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getHasRelatedTerms(); + + return updateRelatedTerms(termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); + } + } catch (Exception e) { + throw new 
RuntimeException(String.format("Failed to add related terms to %s", input.getUrn()), e); + } + } + throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + }); + } + + public Boolean validateRelatedTermsInput(Urn urn, List termUrns) { + if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) || !_entityService.exists(urn)) { + throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); + } + + for (Urn termUrn : termUrns) { + if (termUrn.equals(urn)) { + throw new IllegalArgumentException(String.format("Failed to update %s. Tried to create related term with itself.", urn)); + } else if (!termUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME)) { + throw new IllegalArgumentException(String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); + } else if (!_entityService.exists(termUrn)) { + throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", urn, termUrn)); + } + } + return true; + } + + private Boolean updateRelatedTerms(List termUrns, GlossaryTermUrnArray existingTermUrns, Urn urn, GlossaryRelatedTerms glossaryRelatedTerms, Urn actor) { + List termsToAdd = new ArrayList<>(); + for (Urn termUrn : termUrns) { + if (existingTermUrns.stream().anyMatch(association -> association.equals(termUrn))) { + continue; + } + termsToAdd.add(termUrn); + } + + if (termsToAdd.size() == 0) { + return true; + } + + for (Urn termUrn : termsToAdd) { + GlossaryTermUrn newUrn = new GlossaryTermUrn(termUrn.getId()); + + existingTermUrns.add(newUrn); + } + persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); + return true; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java new file mode 100644 index 00000000000000..2ca9e081d7f3ff --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java @@ -0,0 +1,90 @@ +package com.linkedin.datahub.graphql.resolvers.glossary; + +import com.linkedin.common.GlossaryTermUrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.RelatedTermsInput; +import com.linkedin.datahub.graphql.generated.TermRelationshipType; +import com.linkedin.glossary.GlossaryRelatedTerms; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.EntityService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.getAspectFromEntity; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + +@Slf4j +@RequiredArgsConstructor +public class RemoveRelatedTermsResolver 
implements DataFetcher> { + + private final EntityService _entityService; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + + final QueryContext context = environment.getContext(); + final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + + return CompletableFuture.supplyAsync(() -> { + if (AuthorizationUtils.canManageGlossaries(context)) { + try { + final TermRelationshipType relationshipType = input.getRelationshipType(); + final Urn urn = Urn.createFromString(input.getUrn()); + final List termUrnsToRemove = input.getTermUrns().stream() + .map(UrnUtils::getUrn) + .collect(Collectors.toList()); + + if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) || !_entityService.exists(urn)) { + throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); + } + + Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); + + GlossaryRelatedTerms glossaryRelatedTerms = (GlossaryRelatedTerms) getAspectFromEntity( + urn.toString(), + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + _entityService, + null + ); + if (glossaryRelatedTerms == null) { + throw new RuntimeException(String.format("Related Terms for this Urn do not exist: %s", urn)); + } + + if (relationshipType == TermRelationshipType.isA) { + if (!glossaryRelatedTerms.hasIsRelatedTerms()) { + throw new RuntimeException("Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); + } + final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getIsRelatedTerms(); + + existingTermUrns.removeIf(termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); + persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); + return true; + } else { + if (!glossaryRelatedTerms.hasHasRelatedTerms()) { + throw new RuntimeException("Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); + } + final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getHasRelatedTerms(); + + existingTermUrns.removeIf(termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); + persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); + return true; + } + } catch (Exception e) { + throw new RuntimeException(String.format("Failed to remove related terms from %s", input.getUrn()), e); + } + } + throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserInviteTokenResolver.java new file mode 100644 index 00000000000000..9f8a5ef0e87e49 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserInviteTokenResolver.java @@ -0,0 +1,41 @@ +package com.linkedin.datahub.graphql.resolvers.user; + +import com.datahub.authentication.user.NativeUserService; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.InviteToken; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; + +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; + +/** + * Resolver responsible for creating an invite token that Admins can share with prospective users to create native + * user accounts. + */ +public class CreateNativeUserInviteTokenResolver implements DataFetcher> { + private final NativeUserService _nativeUserService; + + public CreateNativeUserInviteTokenResolver(final NativeUserService nativeUserService) { + _nativeUserService = nativeUserService; + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync(() -> { + if (!canManageUserCredentials(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + try { + String inviteToken = _nativeUserService.generateNativeUserInviteToken(context.getAuthentication()); + return new InviteToken(inviteToken); + } catch (Exception e) { + throw new RuntimeException("Failed to generate new invite token"); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java new file mode 100644 index 00000000000000..d02f1a5f786a74 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java @@ -0,0 +1,52 @@ +package com.linkedin.datahub.graphql.resolvers.user; + +import com.datahub.authentication.user.NativeUserService; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.CreateNativeUserResetTokenInput; +import com.linkedin.datahub.graphql.generated.ResetToken; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; + +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + +/** + * Resolver responsible for creating a password reset token that Admins can share with native users to reset their + * credentials. 
+ */ +public class CreateNativeUserResetTokenResolver implements DataFetcher> { + private final NativeUserService _nativeUserService; + + public CreateNativeUserResetTokenResolver(final NativeUserService nativeUserService) { + _nativeUserService = nativeUserService; + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + final CreateNativeUserResetTokenInput input = + bindArgument(environment.getArgument("input"), CreateNativeUserResetTokenInput.class); + + final String userUrnString = input.getUserUrn(); + Objects.requireNonNull(userUrnString, "No user urn was provided!"); + + if (!canManageUserCredentials(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + return CompletableFuture.supplyAsync(() -> { + try { + String resetToken = + _nativeUserService.generateNativeUserPasswordResetToken(userUrnString, context.getAuthentication()); + return new ResetToken(resetToken); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to generate password reset token for user: %s", userUrnString)); + } + }); + } +} \ No newline at end of file diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/GetNativeUserInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/GetNativeUserInviteTokenResolver.java new file mode 100644 index 00000000000000..1d1a329c790b3f --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/GetNativeUserInviteTokenResolver.java @@ -0,0 +1,42 @@ +package com.linkedin.datahub.graphql.resolvers.user; + +import com.datahub.authentication.user.NativeUserService; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.InviteToken; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; + +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; + +/** + * Resolver responsible for getting an existing invite token that Admins can share with prospective users to create + * native user accounts. If the invite token does not already exist, this resolver will create a new one. + */ +public class GetNativeUserInviteTokenResolver implements DataFetcher> { + private final NativeUserService _nativeUserService; + + public GetNativeUserInviteTokenResolver(final NativeUserService nativeUserService) { + _nativeUserService = nativeUserService; + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + + return CompletableFuture.supplyAsync(() -> { + if (!canManageUserCredentials(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + try { + String inviteToken = _nativeUserService.getNativeUserInviteToken(context.getAuthentication()); + return new InviteToken(inviteToken); + } catch (Exception e) { + throw new RuntimeException("Failed to generate new invite token"); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java index 28f661fa997f0e..c2859d0d312d99 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java @@ -10,6 +10,7 @@ import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.identity.CorpUserCredentials; import com.linkedin.identity.CorpUserEditableInfo; import com.linkedin.identity.CorpUserInfo; import com.linkedin.identity.CorpUserStatus; @@ -45,8 +46,9 @@ public CorpUser apply(@Nonnull final EntityResponse entityResponse) { corpUser.setEditableProperties(CorpUserEditableInfoMapper.map(new CorpUserEditableInfo(dataMap)))); mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (corpUser, dataMap) -> corpUser.setGlobalTags(GlobalTagsMapper.map(new GlobalTags(dataMap)))); - mappingHelper.mapToResult(CORP_USER_STATUS_ASPECT_NAME, (corpUser, dataMap) -> - corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap)))); + mappingHelper.mapToResult(CORP_USER_STATUS_ASPECT_NAME, + (corpUser, dataMap) -> corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap)))); + mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser); return mappingHelper.getResult(); } @@ -60,4 +62,11 @@ private void mapCorpUserInfo(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMa corpUser.setProperties(CorpUserPropertiesMapper.map(corpUserInfo)); corpUser.setInfo(CorpUserInfoMapper.map(corpUserInfo)); } + + private void mapIsNativeUser(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { + CorpUserCredentials corpUserCredentials = new CorpUserCredentials(dataMap); + boolean isNativeUser = + corpUserCredentials != null && corpUserCredentials.hasSalt() && corpUserCredentials.hasHashedPassword(); + corpUser.setIsNativeUser(isNativeUser); + } } diff --git a/datahub-graphql-core/src/main/resources/app.graphql b/datahub-graphql-core/src/main/resources/app.graphql index 6f61acb4362ca8..c183bd67006006 100644 --- a/datahub-graphql-core/src/main/resources/app.graphql +++ b/datahub-graphql-core/src/main/resources/app.graphql @@ -81,6 +81,11 @@ type PlatformPrivileges { Whether the user should be able to manage Glossaries """ manageGlossaries: Boolean! + + """ + Whether the user is able to manage user credentials + """ + manageUserCredentials: Boolean! } """ diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 73dd6b53171013..478cb69ac1034c 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -162,7 +162,12 @@ type Query { """ Get whether or not not an entity exists """ - entityExisst(urn: String!): Boolean + entityExists(urn: String!): Boolean + + """ + Gets the current invite token. 
If an invite token does not already exist, a new one is generated. + """ + getNativeUserInviteToken: InviteToken } """ @@ -365,7 +370,7 @@ type Mutation { Input required to report an operation """ input: ReportOperationInput!): String - + """ Create a new GlossaryTerm. Returns the urn of the newly created GlossaryTerm. If a term with the provided ID already exists, it will be overwritten. """ @@ -390,6 +395,26 @@ type Mutation { Updates the name of the entity. """ updateName(input: UpdateNameInput!): Boolean + + """ + Add multiple related Terms to a Glossary Term to establish relationships + """ + addRelatedTerms(input: RelatedTermsInput!): Boolean + + """ + Remove multiple related Terms from a Glossary Term + """ + removeRelatedTerms(input: RelatedTermsInput!): Boolean + + """ + Generates an invite token that can be shared with prospective users to create their accounts. + """ + createNativeUserInviteToken: InviteToken + + """ + Generates a token that can be shared with existing native users to reset their credentials. + """ + createNativeUserResetToken(input: CreateNativeUserResetTokenInput!): ResetToken } """ @@ -2662,6 +2687,11 @@ type CorpUser implements Entity { """ relationships(input: RelationshipsInput!): EntityRelationshipsResult + """ + Whether or not this user is a native DataHub user + """ + isNativeUser: Boolean + """ Deprecated, use properties field instead Additional read only info about the corp user @@ -6083,6 +6113,42 @@ enum SubResourceType { DATASET_FIELD } + +""" +Input provided when adding or removing related Terms on a Glossary Term +""" +input RelatedTermsInput { + """ + The Glossary Term urn to add or remove this relationship to/from + """ + urn: String! + + """ + The urns of the Glossary Terms to add or remove + """ + termUrns: [String!]! + + """ + The type of relationship we're adding or removing to/from for a Glossary Term + """ + relationshipType: TermRelationshipType! +} + +""" +The type of relationship between two Glossary Terms +""" +enum TermRelationshipType { + """ + When a Term inherits from, or has an 'Is A' relationship with another Term + """ + isA + + """ + When a Term contains, or has a 'Has A' relationship with another Term + """ + hasA +} + """ Input provided when updating the association between a Metadata Entity and a Tag """ @@ -8146,3 +8212,33 @@ input StringMapEntryInput { """ value: String } + +""" +Token that allows users to sign up as a native user +""" +type InviteToken { + """ + The invite token + """ + inviteToken: String! +} + +""" +Input required to generate a password reset token for a native user. +""" +input CreateNativeUserResetTokenInput { + """ + The urn of the user to reset the password of + """ + userUrn: String! +} + +""" +Token that allows native users to reset their credentials +""" +type ResetToken { + """ + The reset token + """ + resetToken: String! 
+} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java new file mode 100644 index 00000000000000..451faf9bc8e382 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java @@ -0,0 +1,255 @@ +package com.linkedin.datahub.graphql.resolvers.glossary; + +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.RelatedTermsInput; +import com.linkedin.datahub.graphql.generated.TermRelationshipType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.mxe.MetadataChangeProposal; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.ExecutionException; + +import org.mockito.Mockito; +import org.testng.annotations.Test; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.testng.Assert.*; + + +public class AddRelatedTermsResolverTest { + + private static final String TEST_ENTITY_URN = "urn:li:glossaryTerm:test-id-0"; + private static final String TEST_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; + private static final String TEST_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; + private static final String DATASET_URN = "urn:li:dataset:(test,test,test)"; + + private EntityService setUpService() { + EntityService mockService = Mockito.mock(EntityService.class); + Mockito.when(mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(null); + return mockService; + } + + @Test + public void testGetSuccessIsRelatedNonExistent() throws Exception { + EntityService mockService = setUpService(); + + Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( + TEST_TERM_1_URN, + TEST_TERM_2_URN + ), TermRelationshipType.isA); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + assertTrue(resolver.get(mockEnv).get()); + + Mockito.verify(mockService, Mockito.times(1)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class) + ); + Mockito.verify(mockService, Mockito.times(1)).exists( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) + ); + Mockito.verify(mockService, Mockito.times(1)).exists( + Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) + ); + Mockito.verify(mockService, Mockito.times(1)).exists( + Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) + ); + } + + @Test + public void testGetSuccessHasRelatedNonExistent() 
throws Exception { + EntityService mockService = setUpService(); + + Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( + TEST_TERM_1_URN, + TEST_TERM_2_URN + ), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + assertTrue(resolver.get(mockEnv).get()); + + Mockito.verify(mockService, Mockito.times(1)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class) + ); + Mockito.verify(mockService, Mockito.times(1)).exists( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) + ); + Mockito.verify(mockService, Mockito.times(1)).exists( + Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) + ); + Mockito.verify(mockService, Mockito.times(1)).exists( + Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) + ); + } + + @Test + public void testGetFailAddSelfAsRelatedTerm() throws Exception { + EntityService mockService = setUpService(); + + Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( + TEST_ENTITY_URN + ), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + Mockito.verify(mockService, Mockito.times(0)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class) + ); + } + + @Test + public void testGetFailAddNonTermAsRelatedTerm() throws Exception { + EntityService mockService = setUpService(); + + Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( + DATASET_URN + ), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + Mockito.verify(mockService, Mockito.times(0)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class) + ); + } + + @Test + public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { + EntityService mockService = setUpService(); + + Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + 
Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(false); + + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( + TEST_TERM_1_URN + ), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + Mockito.verify(mockService, Mockito.times(0)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class) + ); + } + + @Test + public void testGetFailAddToNonExistentUrn() throws Exception { + EntityService mockService = setUpService(); + + Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( + TEST_TERM_1_URN + ), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + Mockito.verify(mockService, Mockito.times(0)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class) + ); + } + + @Test + public void testGetFailAddToNonTerm() throws Exception { + EntityService mockService = setUpService(); + + Mockito.when(mockService.exists(Urn.createFromString(DATASET_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + RelatedTermsInput input = new RelatedTermsInput(DATASET_URN, ImmutableList.of( + TEST_TERM_1_URN + ), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + Mockito.verify(mockService, Mockito.times(0)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class) + ); + } + + @Test + public void testFailNoPermissions() throws Exception { + EntityService mockService = setUpService(); + + Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + + QueryContext mockContext = getMockDenyContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( + 
TEST_TERM_1_URN, + TEST_TERM_2_URN + ), TermRelationshipType.isA); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + Mockito.verify(mockService, Mockito.times(0)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class) + ); + } + +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java new file mode 100644 index 00000000000000..6a704c2b61c127 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java @@ -0,0 +1,166 @@ +package com.linkedin.datahub.graphql.resolvers.glossary; + +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.GlossaryTermUrnArray; +import com.linkedin.common.urn.GlossaryTermUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.RelatedTermsInput; +import com.linkedin.datahub.graphql.generated.TermRelationshipType; +import com.linkedin.glossary.GlossaryRelatedTerms; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.mxe.MetadataChangeProposal; +import graphql.schema.DataFetchingEnvironment; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +import java.util.Arrays; +import java.util.concurrent.ExecutionException; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + +public class RemoveRelatedTermsResolverTest { + + private static final String TEST_ENTITY_URN = "urn:li:glossaryTerm:test-id-0"; + private static final String TEST_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; + private static final String TEST_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; + + @Test + public void testGetSuccessIsA() throws Exception { + GlossaryTermUrn term1Urn = GlossaryTermUrn.createFromString(TEST_TERM_1_URN); + GlossaryTermUrn term2Urn = GlossaryTermUrn.createFromString(TEST_TERM_2_URN); + final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); + relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); + EntityService mockService = Mockito.mock(EntityService.class); + Mockito.when(mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(relatedTerms); + + Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( + TEST_TERM_1_URN + ), TermRelationshipType.isA); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertTrue(resolver.get(mockEnv).get()); + Mockito.verify(mockService, Mockito.times(1)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class) + ); + Mockito.verify(mockService, Mockito.times(1)).exists( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) + ); + } + + @Test + public void testGetSuccessHasA() throws Exception { + GlossaryTermUrn term1Urn = GlossaryTermUrn.createFromString(TEST_TERM_1_URN); + GlossaryTermUrn term2Urn = GlossaryTermUrn.createFromString(TEST_TERM_2_URN); + final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); + relatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); + EntityService mockService = Mockito.mock(EntityService.class); + Mockito.when(mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(relatedTerms); + + Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( + TEST_TERM_1_URN + ), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertTrue(resolver.get(mockEnv).get()); + Mockito.verify(mockService, Mockito.times(1)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class) + ); + Mockito.verify(mockService, Mockito.times(1)).exists( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) + ); + } + + @Test + public void testFailAspectDoesNotExist() throws Exception { + EntityService mockService = Mockito.mock(EntityService.class); + Mockito.when(mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(null); + + Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( + TEST_TERM_1_URN + ), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + Mockito.verify(mockService, Mockito.times(0)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class) + ); + } + + @Test + public void testFailNoPermissions() throws Exception { + GlossaryTermUrn term1Urn = GlossaryTermUrn.createFromString(TEST_TERM_1_URN); + GlossaryTermUrn term2Urn = GlossaryTermUrn.createFromString(TEST_TERM_2_URN); + final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); + relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); + EntityService mockService = Mockito.mock(EntityService.class); + Mockito.when(mockService.getAspect( 
+ Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(relatedTerms); + + Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + + QueryContext mockContext = getMockDenyContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( + TEST_TERM_1_URN + ), TermRelationshipType.isA); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + Mockito.verify(mockService, Mockito.times(0)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class) + ); + Mockito.verify(mockService, Mockito.times(0)).exists( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) + ); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserInviteTokenResolverTest.java new file mode 100644 index 00000000000000..cf66367272fe59 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserInviteTokenResolverTest.java @@ -0,0 +1,50 @@ +package com.linkedin.datahub.graphql.resolvers.user; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.user.NativeUserService; +import com.linkedin.datahub.graphql.QueryContext; +import graphql.schema.DataFetchingEnvironment; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + + +public class CreateNativeUserInviteTokenResolverTest { + + private static final String INVITE_TOKEN = "inviteToken"; + + private NativeUserService _nativeUserService; + private CreateNativeUserInviteTokenResolver _resolver; + private DataFetchingEnvironment _dataFetchingEnvironment; + private Authentication _authentication; + + @BeforeMethod + public void setupTest() { + _nativeUserService = mock(NativeUserService.class); + _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); + _authentication = mock(Authentication.class); + + _resolver = new CreateNativeUserInviteTokenResolver(_nativeUserService); + } + + @Test + public void testFailsCannotManageUserCredentials() { + QueryContext mockContext = getMockDenyContext(); + when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); + + assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join()); + } + + @Test + public void testPasses() throws Exception { + QueryContext mockContext = getMockAllowContext(); + when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); + when(mockContext.getAuthentication()).thenReturn(_authentication); + when(_nativeUserService.generateNativeUserInviteToken(any())).thenReturn(INVITE_TOKEN); + + assertEquals(INVITE_TOKEN, _resolver.get(_dataFetchingEnvironment).join().getInviteToken()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java new file mode 100644 index 00000000000000..2164d4160634ce --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java @@ -0,0 +1,66 @@ +package com.linkedin.datahub.graphql.resolvers.user; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.user.NativeUserService; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CreateNativeUserResetTokenInput; +import graphql.schema.DataFetchingEnvironment; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + + +public class CreateNativeUserResetTokenResolverTest { + + private static final String RESET_TOKEN = "resetToken"; + private static final String USER_URN_STRING = "urn:li:corpuser:test"; + + private NativeUserService _nativeUserService; + private CreateNativeUserResetTokenResolver _resolver; + private DataFetchingEnvironment _dataFetchingEnvironment; + private Authentication _authentication; + + @BeforeMethod + public void setupTest() { + _nativeUserService = mock(NativeUserService.class); + _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); + _authentication = mock(Authentication.class); + + _resolver = new CreateNativeUserResetTokenResolver(_nativeUserService); + } + + @Test + public void testFailsCannotManageUserCredentials() { + QueryContext mockContext = getMockDenyContext(); + when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); + + assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join()); + } + + @Test + public void testFailsNullUserUrn() throws Exception { + QueryContext mockContext = getMockAllowContext(); + when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); + CreateNativeUserResetTokenInput input = new CreateNativeUserResetTokenInput(null); + when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); + when(mockContext.getAuthentication()).thenReturn(_authentication); + when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())).thenReturn(RESET_TOKEN); + + assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join()); + } + + @Test + public void testPasses() throws Exception { + QueryContext mockContext = getMockAllowContext(); + when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); + CreateNativeUserResetTokenInput input = new CreateNativeUserResetTokenInput(USER_URN_STRING); + when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); + when(mockContext.getAuthentication()).thenReturn(_authentication); + when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())).thenReturn(RESET_TOKEN); + + assertEquals(RESET_TOKEN, _resolver.get(_dataFetchingEnvironment).join().getResetToken()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/GetNativeUserInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/GetNativeUserInviteTokenResolverTest.java new file mode 100644 index 00000000000000..0870bcae5661b4 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/GetNativeUserInviteTokenResolverTest.java @@ -0,0 +1,50 
@@ +package com.linkedin.datahub.graphql.resolvers.user; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.user.NativeUserService; +import com.linkedin.datahub.graphql.QueryContext; +import graphql.schema.DataFetchingEnvironment; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + + +public class GetNativeUserInviteTokenResolverTest { + + private static final String INVITE_TOKEN = "inviteToken"; + + private NativeUserService _nativeUserService; + private GetNativeUserInviteTokenResolver _resolver; + private DataFetchingEnvironment _dataFetchingEnvironment; + private Authentication _authentication; + + @BeforeMethod + public void setupTest() { + _nativeUserService = mock(NativeUserService.class); + _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); + _authentication = mock(Authentication.class); + + _resolver = new GetNativeUserInviteTokenResolver(_nativeUserService); + } + + @Test + public void testFailsCannotManageUserCredentials() { + QueryContext mockContext = getMockDenyContext(); + when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); + + assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join()); + } + + @Test + public void testPasses() throws Exception { + QueryContext mockContext = getMockAllowContext(); + when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); + when(mockContext.getAuthentication()).thenReturn(_authentication); + when(_nativeUserService.getNativeUserInviteToken(any())).thenReturn(INVITE_TOKEN); + + assertEquals(INVITE_TOKEN, _resolver.get(_dataFetchingEnvironment).join().getInviteToken()); + } +} diff --git a/datahub-web-react/src/app/Routes.tsx b/datahub-web-react/src/app/Routes.tsx index accde6fb969f01..fa079283402ff6 100644 --- a/datahub-web-react/src/app/Routes.tsx +++ b/datahub-web-react/src/app/Routes.tsx @@ -2,6 +2,8 @@ import React from 'react'; import { Switch, Route, RouteProps, Redirect } from 'react-router-dom'; import { useReactiveVar } from '@apollo/client'; import { LogIn } from './auth/LogIn'; +import { SignUp } from './auth/SignUp'; +import { ResetCredentials } from './auth/ResetCredentials'; import { NoPageFound } from './shared/NoPageFound'; import { PageRoutes } from '../conf/Global'; import { isLoggedInVar } from './auth/checkAuthStatus'; @@ -32,6 +34,8 @@ export const Routes = (): JSX.Element => { return ( + + } /> {/* Starting the react app locally opens /assets by default. For a smoother dev experience, we'll redirect to the homepage */} } exact /> diff --git a/datahub-web-react/src/app/analytics/event.ts b/datahub-web-react/src/app/analytics/event.ts index e4ca1cf86bcabc..d49ff9dfef33ec 100644 --- a/datahub-web-react/src/app/analytics/event.ts +++ b/datahub-web-react/src/app/analytics/event.ts @@ -20,6 +20,8 @@ export enum EventType { SearchAcrossLineageEvent, SearchAcrossLineageResultsViewEvent, DownloadAsCsvEvent, + SignUpEvent, + ResetCredentialsEvent, } /** @@ -40,6 +42,14 @@ export interface PageViewEvent extends BaseEvent { type: EventType.PageViewEvent; } +/** + * Logged on successful new user sign up. + */ +export interface SignUpEvent extends BaseEvent { + type: EventType.SignUpEvent; + title: string; +} + /** * Logged on user successful login. 
*/ @@ -54,6 +64,13 @@ export interface LogOutEvent extends BaseEvent { type: EventType.LogOutEvent; } +/** + * Logged on user resetting their credentials + */ +export interface ResetCredentialsEvent extends BaseEvent { + type: EventType.ResetCredentialsEvent; +} + /** * Logged on user successful search query. */ @@ -189,8 +206,10 @@ export interface DownloadAsCsvEvent extends BaseEvent { */ export type Event = | PageViewEvent + | SignUpEvent | LogInEvent | LogOutEvent + | ResetCredentialsEvent | SearchEvent | SearchResultsViewEvent | SearchResultClickEvent diff --git a/datahub-web-react/src/app/auth/ResetCredentials.tsx b/datahub-web-react/src/app/auth/ResetCredentials.tsx new file mode 100644 index 00000000000000..310a0746964ed3 --- /dev/null +++ b/datahub-web-react/src/app/auth/ResetCredentials.tsx @@ -0,0 +1,169 @@ +import React, { useCallback, useState } from 'react'; +import { Input, Button, Form, message, Image } from 'antd'; +import { UserOutlined, LockOutlined } from '@ant-design/icons'; +import { useReactiveVar } from '@apollo/client'; +import styled, { useTheme } from 'styled-components'; +import { Redirect } from 'react-router'; +import styles from './login.module.css'; +import { Message } from '../shared/Message'; +import { isLoggedInVar } from './checkAuthStatus'; +import analytics, { EventType } from '../analytics'; +import { useAppConfig } from '../useAppConfig'; +import { PageRoutes } from '../../conf/Global'; +import useGetResetTokenFromUrlParams from './useGetResetTokenFromUrlParams'; + +type FormValues = { + email: string; + password: string; + confirmPassword: string; +}; + +const FormInput = styled(Input)` + &&& { + height: 32px; + font-size: 12px; + border: 1px solid #555555; + border-radius: 5px; + background-color: transparent; + color: white; + line-height: 1.5715; + } + > .ant-input { + color: white; + font-size: 14px; + background-color: transparent; + } + > .ant-input:hover { + color: white; + font-size: 14px; + background-color: transparent; + } +`; + +export type ResetCredentialsProps = Record; + +export const ResetCredentials: React.VFC = () => { + const isLoggedIn = useReactiveVar(isLoggedInVar); + const resetToken = useGetResetTokenFromUrlParams(); + + const themeConfig = useTheme(); + const [loading, setLoading] = useState(false); + + const { refreshContext } = useAppConfig(); + + const handleResetCredentials = useCallback( + (values: FormValues) => { + setLoading(true); + const requestOptions = { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + email: values.email, + password: values.password, + resetToken, + }), + }; + fetch('/resetNativeUserCredentials', requestOptions) + .then(async (response) => { + if (!response.ok) { + const data = await response.json(); + const error = (data && data.message) || response.status; + return Promise.reject(error); + } + isLoggedInVar(true); + refreshContext(); + analytics.event({ type: EventType.ResetCredentialsEvent }); + return Promise.resolve(); + }) + .catch((error) => { + message.error(`Failed to reset credentials! ${error}`); + }) + .finally(() => setLoading(false)); + }, + [refreshContext, resetToken], + ); + + if (isLoggedIn && !loading) { + return ; + } + + return ( +
+
+
+ +
+
+ {loading && } +
+ Email} + > + } data-testid="email" /> + + ({ + validator() { + if (getFieldValue('password').length < 8) { + return Promise.reject( + new Error('Your password is less than 8 characters!'), + ); + } + return Promise.resolve(); + }, + }), + ]} + name="password" + // eslint-disable-next-line jsx-a11y/label-has-associated-control + label={} + > + } type="password" data-testid="password" /> + + ({ + validator() { + if (getFieldValue('confirmPassword') !== getFieldValue('password')) { + return Promise.reject(new Error('Your passwords do not match!')); + } + return Promise.resolve(); + }, + }), + ]} + name="confirmPassword" + // eslint-disable-next-line jsx-a11y/label-has-associated-control + label={} + > + } type="password" data-testid="confirmPassword" /> + + + {({ getFieldsValue }) => { + const { email, password, confirmPassword } = getFieldsValue(); + const fieldsAreNotEmpty = !!email && !!password && !!confirmPassword; + const passwordsMatch = password === confirmPassword; + const formIsComplete = fieldsAreNotEmpty && passwordsMatch; + return ( + + ); + }} + +
+
+
+
+ ); +}; diff --git a/datahub-web-react/src/app/auth/SignUp.tsx b/datahub-web-react/src/app/auth/SignUp.tsx new file mode 100644 index 00000000000000..ffda9ce85c63c9 --- /dev/null +++ b/datahub-web-react/src/app/auth/SignUp.tsx @@ -0,0 +1,209 @@ +import React, { useCallback, useState } from 'react'; +import { Input, Button, Form, message, Image, Select } from 'antd'; +import { UserOutlined, LockOutlined } from '@ant-design/icons'; +import { useReactiveVar } from '@apollo/client'; +import styled, { useTheme } from 'styled-components/macro'; +import { Redirect } from 'react-router'; +import styles from './login.module.css'; +import { Message } from '../shared/Message'; +import { isLoggedInVar } from './checkAuthStatus'; +import analytics, { EventType } from '../analytics'; +import { useAppConfig } from '../useAppConfig'; +import { PageRoutes } from '../../conf/Global'; +import useGetInviteTokenFromUrlParams from './useGetInviteTokenFromUrlParams'; + +type FormValues = { + fullName: string; + email: string; + password: string; + confirmPassword: string; + title: string; +}; + +const FormInput = styled(Input)` + &&& { + height: 32px; + font-size: 12px; + border: 1px solid #555555; + border-radius: 5px; + background-color: transparent; + color: white; + line-height: 1.5715; + } + > .ant-input { + color: white; + font-size: 14px; + background-color: transparent; + } + > .ant-input:hover { + color: white; + font-size: 14px; + background-color: transparent; + } +`; + +const TitleSelector = styled(Select)` + .ant-select-selector { + color: white; + border: 1px solid #555555 !important; + background-color: transparent !important; + } + .ant-select-arrow { + color: white; + } +`; + +export type SignUpProps = Record; + +export const SignUp: React.VFC = () => { + const isLoggedIn = useReactiveVar(isLoggedInVar); + const inviteToken = useGetInviteTokenFromUrlParams(); + + const themeConfig = useTheme(); + const [loading, setLoading] = useState(false); + + const { refreshContext } = useAppConfig(); + + const handleSignUp = useCallback( + (values: FormValues) => { + setLoading(true); + const requestOptions = { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + fullName: values.fullName, + email: values.email, + password: values.password, + title: values.title, + inviteToken, + }), + }; + fetch('/signUp', requestOptions) + .then(async (response) => { + if (!response.ok) { + const data = await response.json(); + const error = (data && data.message) || response.status; + return Promise.reject(error); + } + isLoggedInVar(true); + refreshContext(); + analytics.event({ type: EventType.SignUpEvent, title: values.title }); + return Promise.resolve(); + }) + .catch((error) => { + message.error(`Failed to sign up! ${error}`); + }) + .finally(() => setLoading(false)); + }, + [refreshContext, inviteToken], + ); + + if (isLoggedIn && !loading) { + return ; + } + + return ( +
+
+
+ +
+
+ {loading && } +
+ Email} + > + } data-testid="email" /> + + Full Name} + > + } data-testid="name" /> + + ({ + validator() { + if (getFieldValue('password').length < 8) { + return Promise.reject( + new Error('Your password is less than 8 characters!'), + ); + } + return Promise.resolve(); + }, + }), + ]} + name="password" + // eslint-disable-next-line jsx-a11y/label-has-associated-control + label={} + > + } type="password" data-testid="password" /> + + ({ + validator() { + if (getFieldValue('confirmPassword') !== getFieldValue('password')) { + return Promise.reject(new Error('Your passwords do not match!')); + } + return Promise.resolve(); + }, + }), + ]} + name="confirmPassword" + // eslint-disable-next-line jsx-a11y/label-has-associated-control + label={} + > + } type="password" data-testid="confirmPassword" /> + + Title} + > + + Data Analyst + Data Engineer + Data Scientist + Software Engineer + Manager + Product Manager + Other + + + + {({ getFieldsValue }) => { + const { fullName, email, password, confirmPassword, title } = getFieldsValue(); + const fieldsAreNotEmpty = + !!fullName && !!email && !!password && !!confirmPassword && !!title; + const passwordsMatch = password === confirmPassword; + const formIsComplete = fieldsAreNotEmpty && passwordsMatch; + return ( + + ); + }} + +
+
+
+
+ ); +}; diff --git a/datahub-web-react/src/app/auth/useGetInviteTokenFromUrlParams.tsx b/datahub-web-react/src/app/auth/useGetInviteTokenFromUrlParams.tsx new file mode 100644 index 00000000000000..7654b7c8b4ef67 --- /dev/null +++ b/datahub-web-react/src/app/auth/useGetInviteTokenFromUrlParams.tsx @@ -0,0 +1,9 @@ +import * as QueryString from 'query-string'; +import { useLocation } from 'react-router-dom'; + +export default function useGetInviteTokenFromUrlParams() { + const location = useLocation(); + const params = QueryString.parse(location.search, { arrayFormat: 'comma' }); + const inviteToken: string = params.invite_token as string; + return inviteToken; +} diff --git a/datahub-web-react/src/app/auth/useGetResetTokenFromUrlParams.tsx b/datahub-web-react/src/app/auth/useGetResetTokenFromUrlParams.tsx new file mode 100644 index 00000000000000..ef9c3409f74af7 --- /dev/null +++ b/datahub-web-react/src/app/auth/useGetResetTokenFromUrlParams.tsx @@ -0,0 +1,9 @@ +import * as QueryString from 'query-string'; +import { useLocation } from 'react-router-dom'; + +export default function useGetResetTokenFromUrlParams() { + const location = useLocation(); + const params = QueryString.parse(location.search, { arrayFormat: 'comma' }); + const resetToken: string = params.reset_token as string; + return resetToken; +} diff --git a/datahub-web-react/src/app/entity/glossaryTerm/profile/AddRelatedTermsModal.tsx b/datahub-web-react/src/app/entity/glossaryTerm/profile/AddRelatedTermsModal.tsx new file mode 100644 index 00000000000000..5b303f75e2985a --- /dev/null +++ b/datahub-web-react/src/app/entity/glossaryTerm/profile/AddRelatedTermsModal.tsx @@ -0,0 +1,205 @@ +import { message, Button, Modal, Select, Tag } from 'antd'; +import React, { useState } from 'react'; +import styled from 'styled-components/macro'; +import { useAddRelatedTermsMutation } from '../../../../graphql/glossaryTerm.generated'; +import { useGetSearchResultsLazyQuery } from '../../../../graphql/search.generated'; +import { EntityType, SearchResult, TermRelationshipType } from '../../../../types.generated'; +import GlossaryBrowser from '../../../glossary/GlossaryBrowser/GlossaryBrowser'; +import ClickOutside from '../../../shared/ClickOutside'; +import { BrowserWrapper } from '../../../shared/tags/AddTagsTermsModal'; +import TermLabel from '../../../shared/TermLabel'; +import { useEntityRegistry } from '../../../useEntityRegistry'; +import { useEntityData, useRefetch } from '../../shared/EntityContext'; + +const StyledSelect = styled(Select)` + width: 480px; +`; + +interface Props { + onClose: () => void; + relationshipType: TermRelationshipType; +} + +function AddRelatedTermsModal(props: Props) { + const { onClose, relationshipType } = props; + + const [inputValue, setInputValue] = useState(''); + const [selectedUrns, setSelectedUrns] = useState([]); + const [selectedTerms, setSelectedTerms] = useState([]); + const [isFocusedOnInput, setIsFocusedOnInput] = useState(false); + const entityRegistry = useEntityRegistry(); + const { urn: entityDataUrn } = useEntityData(); + const refetch = useRefetch(); + + const [AddRelatedTerms] = useAddRelatedTermsMutation(); + + function addTerms() { + AddRelatedTerms({ + variables: { + input: { + urn: entityDataUrn, + termUrns: selectedUrns, + relationshipType, + }, + }, + }) + .catch((e) => { + message.destroy(); + message.error({ content: `Failed to add Related Terms: \n ${e.message || ''}`, duration: 3 }); + }) + .finally(() => { + message.loading({ content: 'Adding...', duration: 2 }); + setTimeout(() => {
message.success({ + content: 'Added Related Terms!', + duration: 2, + }); + refetch(); + }, 2000); + }); + onClose(); + } + + const [termSearch, { data: termSearchData }] = useGetSearchResultsLazyQuery(); + const termSearchResults = termSearchData?.search?.searchResults || []; + + const tagSearchOptions = termSearchResults.map((result: SearchResult) => { + const displayName = entityRegistry.getDisplayName(result.entity.type, result.entity); + + return ( + + + + ); + }); + + const handleSearch = (text: string) => { + if (text.length > 0) { + termSearch({ + variables: { + input: { + type: EntityType.GlossaryTerm, + query: text, + start: 0, + count: 20, + }, + }, + }); + } + }; + + // When a term search result is selected, add its urn to the selected urns + const onSelectValue = (urn: string) => { + const newUrns = [...selectedUrns, urn]; + setSelectedUrns(newUrns); + const selectedSearchOption = tagSearchOptions.find((option) => option.props.value === urn); + setSelectedTerms([...selectedTerms, { urn, component: }]); + }; + + // When a term search result is deselected, remove its urn from the selected urns + const onDeselectValue = (urn: string) => { + const newUrns = selectedUrns.filter((u) => u !== urn); + setSelectedUrns(newUrns); + setInputValue(''); + setIsFocusedOnInput(true); + setSelectedTerms(selectedTerms.filter((term) => term.urn !== urn)); + }; + + function selectTermFromBrowser(urn: string, displayName: string) { + setIsFocusedOnInput(false); + const newUrns = [...selectedUrns, urn]; + setSelectedUrns(newUrns); + setSelectedTerms([...selectedTerms, { urn, component: }]); + } + + function clearInput() { + setInputValue(''); + setTimeout(() => setIsFocusedOnInput(true), 0); // call after click outside + } + + function handleBlur() { + setInputValue(''); + } + + const tagRender = (properties) => { + // eslint-disable-next-line react/prop-types + const { closable, onClose: close, value } = properties; + const onPreventMouseDown = (event) => { + event.preventDefault(); + event.stopPropagation(); + }; + const selectedItem = selectedTerms.find((term) => term.urn === value).component; + + return ( + + {selectedItem} + + ); + }; + + const isShowingGlossaryBrowser = !inputValue && isFocusedOnInput; + + return ( + + + + + } + > + setIsFocusedOnInput(false)}> + onSelectValue(asset)} + onDeselect={(asset: any) => onDeselectValue(asset)} + onSearch={(value: string) => { + // eslint-disable-next-line react/prop-types + handleSearch(value.trim()); + // eslint-disable-next-line react/prop-types + setInputValue(value.trim()); + }} + tagRender={tagRender} + value={selectedUrns} + onClear={clearInput} + onFocus={() => setIsFocusedOnInput(true)} + onBlur={handleBlur} + dropdownStyle={isShowingGlossaryBrowser || !inputValue ?
{ display: 'none' } : {}} + > + {tagSearchOptions} + + + + + + + ); +} + +export default AddRelatedTermsModal; diff --git a/datahub-web-react/src/app/entity/glossaryTerm/profile/GlossaryRelatedTerms.tsx b/datahub-web-react/src/app/entity/glossaryTerm/profile/GlossaryRelatedTerms.tsx index f20b76a9a14785..61ea2c329959ad 100644 --- a/datahub-web-react/src/app/entity/glossaryTerm/profile/GlossaryRelatedTerms.tsx +++ b/datahub-web-react/src/app/entity/glossaryTerm/profile/GlossaryRelatedTerms.tsx @@ -1,26 +1,23 @@ import { Menu } from 'antd'; import React, { useEffect, useState } from 'react'; -import styled from 'styled-components'; +import styled from 'styled-components/macro'; import { useEntityData } from '../../shared/EntityContext'; -import GlossaryRelatedTermsResult from './GlossaryRelatedTermsResult'; - -export enum RelatedTermTypes { - hasRelatedTerms = 'Contains', - isRelatedTerms = 'Inherits', -} +import GlossaryRelatedTermsResult, { RelatedTermTypes } from './GlossaryRelatedTermsResult'; const DetailWrapper = styled.div` display: inline-flex; + flex: 1; width: 100%; `; const MenuWrapper = styled.div` - border: 2px solid #f5f5f5; + border-right: 2px solid #f5f5f5; `; const Content = styled.div` - margin-left: 32px; flex-grow: 1; + max-width: 100%; + overflow: hidden; `; export default function GlossayRelatedTerms() { diff --git a/datahub-web-react/src/app/entity/glossaryTerm/profile/GlossaryRelatedTermsResult.tsx b/datahub-web-react/src/app/entity/glossaryTerm/profile/GlossaryRelatedTermsResult.tsx index e787bbf5a0abdf..b5c18d5d58ea0e 100644 --- a/datahub-web-react/src/app/entity/glossaryTerm/profile/GlossaryRelatedTermsResult.tsx +++ b/datahub-web-react/src/app/entity/glossaryTerm/profile/GlossaryRelatedTermsResult.tsx @@ -1,12 +1,17 @@ -import { QueryResult } from '@apollo/client'; -import { Divider, List, Typography } from 'antd'; -import React from 'react'; -import styled from 'styled-components'; -import { GetGlossaryTermQuery, useGetGlossaryTermQuery } from '../../../../graphql/glossaryTerm.generated'; -import { EntityType, Exact } from '../../../../types.generated'; +import { PlusOutlined } from '@ant-design/icons'; +import { Button, Typography } from 'antd'; +import React, { useState } from 'react'; +import styled from 'styled-components/macro'; +import { TermRelationshipType } from '../../../../types.generated'; import { Message } from '../../../shared/Message'; -import { useEntityRegistry } from '../../../useEntityRegistry'; -import { PreviewType } from '../../Entity'; +import { ANTD_GRAY } from '../../shared/constants'; +import AddRelatedTermsModal from './AddRelatedTermsModal'; +import RelatedTerm from './RelatedTerm'; + +export enum RelatedTermTypes { + hasRelatedTerms = 'Contains', + isRelatedTerms = 'Inherits', +} export type Props = { glossaryRelatedTermType: string; @@ -14,47 +19,32 @@ export type Props = { }; const ListContainer = styled.div` - display: default; - flex-grow: default; + width: 100%; `; const TitleContainer = styled.div` + align-items: center; + border-bottom: solid 1px ${ANTD_GRAY[4]}; + display: flex; + justify-content: space-between; + padding: 15px 20px; margin-bottom: 30px; `; -const ListItem = styled.div` - margin: 40px; - padding-bottom: 5px; -`; - -const Profile = styled.div` - marging-bottom: 20px; -`; - const messageStyle = { marginTop: '10%' }; export default function GlossaryRelatedTermsResult({ glossaryRelatedTermType, glossaryRelatedTermResult }: Props) { - const entityRegistry = useEntityRegistry(); + const [isShowingAddModal, 
setIsShowingAddModal] = useState(false); const glossaryRelatedTermUrns: Array = []; glossaryRelatedTermResult.forEach((item: any) => { glossaryRelatedTermUrns.push(item?.entity?.urn); }); - const glossaryTermInfo: QueryResult>[] = []; - - for (let i = 0; i < glossaryRelatedTermUrns.length; i++) { - glossaryTermInfo.push( - // eslint-disable-next-line react-hooks/rules-of-hooks - useGetGlossaryTermQuery({ - variables: { - urn: glossaryRelatedTermUrns[i], - }, - }), - ); - } + const contentLoading = false; + const relationshipType = + glossaryRelatedTermType === RelatedTermTypes.hasRelatedTerms + ? TermRelationshipType.HasA + : TermRelationshipType.IsA; - const contentLoading = glossaryTermInfo.some((item) => { - return item.loading; - }); return ( <> {contentLoading ? ( @@ -62,28 +52,21 @@ export default function GlossaryRelatedTermsResult({ glossaryRelatedTermType, gl ) : ( - {glossaryRelatedTermType} - + + {glossaryRelatedTermType} + + - { - return ( - - - {entityRegistry.renderPreview( - EntityType.GlossaryTerm, - PreviewType.PREVIEW, - item?.data?.glossaryTerm, - )} - - - - ); - }} - /> + {glossaryRelatedTermUrns.map((urn) => ( + + ))} )} + {isShowingAddModal && ( + setIsShowingAddModal(false)} relationshipType={relationshipType} /> + )} ); } diff --git a/datahub-web-react/src/app/entity/glossaryTerm/profile/RelatedTerm.tsx b/datahub-web-react/src/app/entity/glossaryTerm/profile/RelatedTerm.tsx new file mode 100644 index 00000000000000..e7f6cc316567a1 --- /dev/null +++ b/datahub-web-react/src/app/entity/glossaryTerm/profile/RelatedTerm.tsx @@ -0,0 +1,77 @@ +import { DeleteOutlined, MoreOutlined } from '@ant-design/icons'; +import { Divider, Dropdown, Menu } from 'antd'; +import React from 'react'; +import styled from 'styled-components/macro'; +import { useGetGlossaryTermQuery } from '../../../../graphql/glossaryTerm.generated'; +import { EntityType, TermRelationshipType } from '../../../../types.generated'; +import { useEntityRegistry } from '../../../useEntityRegistry'; +import { PreviewType } from '../../Entity'; +import useRemoveRelatedTerms from './useRemoveRelatedTerms'; + +const ListItem = styled.div` + margin: 0 20px; +`; + +const Profile = styled.div` + display: flex; + margin-bottom: 20px; +`; + +const MenuIcon = styled(MoreOutlined)` + display: flex; + justify-content: center; + align-items: center; + font-size: 20px; + height: 32px; + margin-left: -10px; +`; + +const MenuItem = styled.div` + font-size: 12px; + padding: 0 4px; + color: #262626; +`; + +interface Props { + urn: string; + relationshipType: TermRelationshipType; +} + +function RelatedTerm(props: Props) { + const { urn, relationshipType } = props; + + const entityRegistry = useEntityRegistry(); + const { data, loading } = useGetGlossaryTermQuery({ variables: { urn } }); + let displayName = ''; + if (data) { + displayName = entityRegistry.getDisplayName(EntityType.GlossaryTerm, data.glossaryTerm); + } + const { onRemove } = useRemoveRelatedTerms(urn, relationshipType, displayName); + + if (loading) return null; + + return ( + + + {entityRegistry.renderPreview(EntityType.GlossaryTerm, PreviewType.PREVIEW, data?.glossaryTerm)} + + + +   Remove Term + + + + } + trigger={['click']} + > + + + + + + ); +} + +export default RelatedTerm; diff --git a/datahub-web-react/src/app/entity/glossaryTerm/profile/useRemoveRelatedTerms.tsx b/datahub-web-react/src/app/entity/glossaryTerm/profile/useRemoveRelatedTerms.tsx new file mode 100644 index 00000000000000..0eb46924243cdb --- /dev/null +++
b/datahub-web-react/src/app/entity/glossaryTerm/profile/useRemoveRelatedTerms.tsx @@ -0,0 +1,60 @@ +import { message, Modal } from 'antd'; +import { useEntityRegistry } from '../../../useEntityRegistry'; +import { useEntityData, useRefetch } from '../../shared/EntityContext'; +import { useRemoveRelatedTermsMutation } from '../../../../graphql/glossaryTerm.generated'; +import { TermRelationshipType } from '../../../../types.generated'; + +function useRemoveRelatedTerms(termUrn: string, relationshipType: TermRelationshipType, displayName: string) { + const { urn, entityType } = useEntityData(); + const entityRegistry = useEntityRegistry(); + const refetch = useRefetch(); + + const [removeRelatedTerms] = useRemoveRelatedTermsMutation(); + + function handleRemoveRelatedTerms() { + removeRelatedTerms({ + variables: { + input: { + urn, + termUrns: [termUrn], + relationshipType, + }, + }, + }) + .catch((e) => { + message.destroy(); + message.error({ content: `Failed to remove: \n ${e.message || ''}`, duration: 3 }); + }) + .finally(() => { + message.loading({ + content: 'Removing...', + duration: 2, + }); + setTimeout(() => { + refetch(); + message.success({ + content: `Removed Glossary Term!`, + duration: 2, + }); + }, 2000); + }); + } + + function onRemove() { + Modal.confirm({ + title: `Remove ${displayName}`, + content: `Are you sure you want to remove this ${entityRegistry.getEntityName(entityType)}?`, + onOk() { + handleRemoveRelatedTerms(); + }, + onCancel() {}, + okText: 'Yes', + maskClosable: true, + closable: true, + }); + } + + return { onRemove }; +} + +export default useRemoveRelatedTerms; diff --git a/datahub-web-react/src/app/entity/group/AddGroupMembersModal.tsx b/datahub-web-react/src/app/entity/group/AddGroupMembersModal.tsx index 2d295cda587183..b950db6f1a16b0 100644 --- a/datahub-web-react/src/app/entity/group/AddGroupMembersModal.tsx +++ b/datahub-web-react/src/app/entity/group/AddGroupMembersModal.tsx @@ -1,7 +1,6 @@ -import React, { useRef, useState } from 'react'; +import React, { useState } from 'react'; import { message, Modal, Button, Form, Select, Tag } from 'antd'; import styled from 'styled-components'; -import { Link } from 'react-router-dom'; import { useAddGroupMembersMutation } from '../../../graphql/group.generated'; import { CorpUser, EntityType, SearchResult } from '../../../types.generated'; import { CustomAvatar } from '../../shared/avatar'; @@ -11,7 +10,7 @@ import { useEntityRegistry } from '../../useEntityRegistry'; type Props = { urn: string; visible: boolean; - onClose: () => void; + onCloseModal: () => void; onSubmit: () => void; }; @@ -19,70 +18,44 @@ const SearchResultContainer = styled.div` display: flex; justify-content: space-between; align-items: center; - padding: 12px; + padding: 2px; `; const SearchResultContent = styled.div` display: flex; - justify-content: start; + justify-content: center; align-items: center; `; -const SearchResultDisplayName = styled.div` - margin-left: 12px; +const SelectInput = styled(Select)` + > .ant-select-selector { + height: 36px; + } +`; + +const StyleTag = styled(Tag)` + padding: 0px 7px 0px 0px; + margin-right: 3px; + display: flex; + justify-content: start; + align-items: center; `; -export const AddGroupMembersModal = ({ urn, visible, onClose, onSubmit }: Props) => { +export const AddGroupMembersModal = ({ urn, visible, onCloseModal, onSubmit }: Props) => { const entityRegistry = useEntityRegistry(); - const [selectedUsers, setSelectedUsers] = useState>([]); - const [userSearch, { data: userSearchData }] = 
useGetSearchResultsLazyQuery(); + const [selectedMembers, setSelectedMembers] = useState([]); const [addGroupMembersMutation] = useAddGroupMembersMutation(); + const [userSearch, { data: userSearchData }] = useGetSearchResultsLazyQuery(); const searchResults = userSearchData?.search?.searchResults || []; - const inputEl = useRef(null); - - const onAdd = async () => { - if (selectedUsers.length === 0) { - return; - } - addGroupMembersMutation({ - variables: { - groupUrn: urn, - userUrns: selectedUsers.map((user) => user.urn), - }, - }) - .catch((e) => { - message.destroy(); - message.error({ content: `Failed to add group members!: \n ${e.message || ''}`, duration: 3 }); - }) - .finally(() => { - message.success({ - content: `Group members added!`, - duration: 3, - }); - onSubmit(); - setSelectedUsers([]); - }); - onClose(); + const onSelectMember = (newMemberUrn: string) => { + const newUsers = [...(selectedMembers || []), newMemberUrn]; + setSelectedMembers(newUsers); }; - const onSelectMember = (newUserUrn: string) => { - if (inputEl && inputEl.current) { - (inputEl.current as any).blur(); - } - const filteredUsers = searchResults - .filter((result) => result.entity.urn === newUserUrn) - .map((result) => result.entity); - if (filteredUsers.length) { - const newUser = filteredUsers[0] as CorpUser; - const newUsers = [...(selectedUsers || []), newUser]; - setSelectedUsers(newUsers); - } - }; - - const onDeselectMember = (userUrn: string) => { - const newUserActors = selectedUsers.filter((user) => user.urn !== userUrn); - setSelectedUsers(newUserActors); + const onDeselectMember = (memberUrn: string) => { + const newUserActors = selectedMembers.filter((user) => user !== memberUrn); + setSelectedMembers(newUserActors); }; const handleUserSearch = (text: string) => { @@ -107,33 +80,63 @@ export const AddGroupMembersModal = ({ urn, visible, onClose, onSubmit }: Props) return ( - - -
{displayName}
-
+ +
{displayName}
- `/${entityRegistry.getPathName(result.entity.type)}/${result.entity.urn}`} - > - View - {' '}
); }; + const tagRender = (props) => { + // eslint-disable-next-line react/prop-types + const { label, closable, onClose } = props; + const onPreventMouseDown = (event) => { + event.preventDefault(); + event.stopPropagation(); + }; + return ( + + {label} + + ); + }; + + const onAdd = async () => { + if (selectedMembers.length === 0) { + return; + } + addGroupMembersMutation({ + variables: { + groupUrn: urn, + userUrns: selectedMembers, + }, + }) + .catch((e) => { + message.destroy(); + message.error({ content: `Failed to add group members!: \n ${e.message || ''}`, duration: 3 }); + }) + .finally(() => { + message.success({ + content: `Group members added!`, + duration: 3, + }); + onSubmit(); + setSelectedMembers([]); + }); + onCloseModal(); + }; + return ( - - @@ -141,23 +144,22 @@ export const AddGroupMembersModal = ({ urn, visible, onClose, onSubmit }: Props) >
- +
diff --git a/datahub-web-react/src/app/entity/group/GroupMembers.tsx b/datahub-web-react/src/app/entity/group/GroupMembers.tsx index 63a06f64a90e64..c6d86f403e0398 100644 --- a/datahub-web-react/src/app/entity/group/GroupMembers.tsx +++ b/datahub-web-react/src/app/entity/group/GroupMembers.tsx @@ -228,7 +228,7 @@ export default function GroupMembers({ urn, pageSize, onChangeMembers }: Props) urn={urn} visible={isEditingMembers} onSubmit={onAddMembers} - onClose={() => setIsEditingMembers(false)} + onCloseModal={() => setIsEditingMembers(false)} /> ); diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/NodeParentSelect.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/NodeParentSelect.tsx index e2b24ae9e78a4d..86c2b84a67c3d1 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/NodeParentSelect.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/NodeParentSelect.tsx @@ -29,7 +29,7 @@ function NodeParentSelect(props: Props) { const [isFocusedOnInput, setIsFocusedOnInput] = useState(false); const [searchQuery, setSearchQuery] = useState(''); const entityRegistry = useEntityRegistry(); - const { entityData, urn: entityDataUrn } = useEntityData(); + const { entityData, urn: entityDataUrn, entityType } = useEntityData(); const [nodeSearch, { data: nodeData }] = useGetSearchResultsLazyQuery(); let nodeSearchResults = nodeData?.search?.searchResults || []; @@ -82,6 +82,7 @@ function NodeParentSelect(props: Props) { } const isShowingGlossaryBrowser = !searchQuery && isFocusedOnInput; + const shouldHideSelf = isMoving && entityType === EntityType.GlossaryNode; return ( setIsFocusedOnInput(false)}> @@ -103,7 +104,12 @@ function NodeParentSelect(props: Props) { ))} - + ); diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx index 036c800850e523..9c170a1bbe5ac0 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx @@ -63,7 +63,6 @@ const ContentContainer = styled.div` const HeaderAndTabs = styled.div` flex-grow: 1; min-width: 640px; - height: 100%; `; const HeaderAndTabsFlex = styled.div` diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx index 2d2c7cf7b2f8aa..ff21db388c08ff 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx @@ -64,6 +64,18 @@ const DeprecatedText = styled.div` margin-left: 5px; `; +const DeprecatedTitle = styled(Typography.Text)` + display: block; + font-size: 14px; + margin-bottom: 5px; + font-weight: bold; +`; + +const DeprecatedSubTitle = styled(Typography.Text)` + display: block; + margin-bottom: 5px; +`; + const LastEvaluatedAtLabel = styled.div` padding: 0; margin: 0; @@ -131,9 +143,7 @@ export const EntityHeader = ({ refreshBrowser, headerDropdownItems, isNameEditab (entityData?.deprecation?.decommissionTime && `Scheduled to be decommissioned on ${moment .unix(entityData?.deprecation?.decommissionTime) - .format('DD/MMM/YYYY')} at ${moment - .unix(entityData?.deprecation?.decommissionTime) - .format('HH:mm:ss')} (${localeTimezone})`) || + .format('DD/MMM/YYYY')} (${localeTimezone})`) || undefined; const decommissionTimeGMT = 
entityData?.deprecation?.decommissionTime && @@ -157,9 +167,12 @@ export const EntityHeader = ({ refreshBrowser, headerDropdownItems, isNameEditab hasDetails ? ( <> {entityData?.deprecation?.note !== '' && ( - {entityData?.deprecation?.note} + Note )} {isDividerNeeded && } + {entityData?.deprecation?.note !== '' && ( + {entityData?.deprecation?.note} + )} {entityData?.deprecation?.decommissionTime !== null && ( diff --git a/datahub-web-react/src/app/glossary/GlossaryBrowser/GlossaryBrowser.tsx b/datahub-web-react/src/app/glossary/GlossaryBrowser/GlossaryBrowser.tsx index 425026e6172cb4..b5c256f478b88d 100644 --- a/datahub-web-react/src/app/glossary/GlossaryBrowser/GlossaryBrowser.tsx +++ b/datahub-web-react/src/app/glossary/GlossaryBrowser/GlossaryBrowser.tsx @@ -20,13 +20,23 @@ interface Props { hideTerms?: boolean; openToEntity?: boolean; refreshBrowser?: boolean; + nodeUrnToHide?: string; selectTerm?: (urn: string, displayName: string) => void; selectNode?: (urn: string, displayName: string) => void; } function GlossaryBrowser(props: Props) { - const { rootNodes, rootTerms, isSelecting, hideTerms, refreshBrowser, openToEntity, selectTerm, selectNode } = - props; + const { + rootNodes, + rootTerms, + isSelecting, + hideTerms, + refreshBrowser, + openToEntity, + nodeUrnToHide, + selectTerm, + selectNode, + } = props; const { data: nodesData, refetch: refetchNodes } = useGetRootGlossaryNodesQuery({ skip: !!rootNodes }); const { data: termsData, refetch: refetchTerms } = useGetRootGlossaryTermsQuery({ skip: !!rootTerms }); @@ -51,6 +61,7 @@ function GlossaryBrowser(props: Props) { hideTerms={hideTerms} openToEntity={openToEntity} refreshBrowser={refreshBrowser} + nodeUrnToHide={nodeUrnToHide} selectTerm={selectTerm} selectNode={selectNode} /> diff --git a/datahub-web-react/src/app/glossary/GlossaryBrowser/NodeItem.tsx b/datahub-web-react/src/app/glossary/GlossaryBrowser/NodeItem.tsx index 61136928ae53fc..aa64d58493b1ed 100644 --- a/datahub-web-react/src/app/glossary/GlossaryBrowser/NodeItem.tsx +++ b/datahub-web-react/src/app/glossary/GlossaryBrowser/NodeItem.tsx @@ -49,17 +49,22 @@ interface Props { hideTerms?: boolean; openToEntity?: boolean; refreshBrowser?: boolean; + nodeUrnToHide?: string; selectTerm?: (urn: string, displayName: string) => void; selectNode?: (urn: string, displayName: string) => void; } function NodeItem(props: Props) { - const { node, isSelecting, hideTerms, openToEntity, refreshBrowser, selectTerm, selectNode } = props; + const { node, isSelecting, hideTerms, openToEntity, refreshBrowser, nodeUrnToHide, selectTerm, selectNode } = props; + const shouldHideNode = nodeUrnToHide === node.urn; const [areChildrenVisible, setAreChildrenVisible] = useState(false); const entityRegistry = useEntityRegistry(); const { entityData } = useEntityData(); - const { data } = useGetGlossaryNodeQuery({ variables: { urn: node.urn }, skip: !areChildrenVisible }); + const { data } = useGetGlossaryNodeQuery({ + variables: { urn: node.urn }, + skip: !areChildrenVisible || shouldHideNode, + }); useEffect(() => { if (openToEntity && entityData && entityData.parentNodes?.nodes.some((parent) => parent.urn === node.urn)) { @@ -94,6 +99,8 @@ function NodeItem(props: Props) { ?.filter((child) => child.entity?.type === EntityType.GlossaryTerm) .map((child) => child.entity) || []; + if (shouldHideNode) return null; + return ( @@ -123,6 +130,7 @@ function NodeItem(props: Props) { isSelecting={isSelecting} hideTerms={hideTerms} openToEntity={openToEntity} + nodeUrnToHide={nodeUrnToHide} 
selectTerm={selectTerm} selectNode={selectNode} /> diff --git a/datahub-web-react/src/app/identity/user/UserList.tsx b/datahub-web-react/src/app/identity/user/UserList.tsx index 43d400bfbea998..9e7b19cef66882 100644 --- a/datahub-web-react/src/app/identity/user/UserList.tsx +++ b/datahub-web-react/src/app/identity/user/UserList.tsx @@ -1,7 +1,8 @@ import React, { useEffect, useState } from 'react'; -import { Empty, List, message, Pagination } from 'antd'; +import { Button, Empty, List, message, Pagination } from 'antd'; import styled from 'styled-components'; import * as QueryString from 'query-string'; +import { UsergroupAddOutlined } from '@ant-design/icons'; import { useLocation } from 'react-router'; import UserListItem from './UserListItem'; import { Message } from '../../shared/Message'; @@ -10,6 +11,8 @@ import { CorpUser } from '../../../types.generated'; import TabToolbar from '../../entity/shared/components/styled/TabToolbar'; import { SearchBar } from '../../search/SearchBar'; import { useEntityRegistry } from '../../useEntityRegistry'; +import ViewInviteTokenModal from './ViewInviteTokenModal'; +import { useGetAuthenticatedUser } from '../../useGetAuthenticatedUser'; const UserContainer = styled.div``; @@ -36,8 +39,12 @@ export const UserList = () => { useEffect(() => setQuery(paramsQuery), [paramsQuery]); const [page, setPage] = useState(1); + const [isViewingInviteToken, setIsViewingInviteToken] = useState(false); const [removedUrns, setRemovedUrns] = useState([]); + const authenticatedUser = useGetAuthenticatedUser(); + const canManageUserCredentials = authenticatedUser?.platformPrivileges.manageUserCredentials || false; + const pageSize = DEFAULT_PAGE_SIZE; const start = (page - 1) * pageSize; @@ -76,7 +83,13 @@ export const UserList = () => {
- <> +
{ }} dataSource={filteredUsers} renderItem={(item: any) => ( - handleDelete(item.urn as string)} user={item as CorpUser} /> + handleDelete(item.urn as string)} + user={item as CorpUser} + canManageUserCredentials={canManageUserCredentials} + /> )} /> @@ -116,6 +133,12 @@ export const UserList = () => { showSizeChanger={false} /> + {canManageUserCredentials && ( + setIsViewingInviteToken(false)} + /> + )}
); diff --git a/datahub-web-react/src/app/identity/user/UserListItem.tsx b/datahub-web-react/src/app/identity/user/UserListItem.tsx index 3e7d3a88ee6818..2080dc2582ca1d 100644 --- a/datahub-web-react/src/app/identity/user/UserListItem.tsx +++ b/datahub-web-react/src/app/identity/user/UserListItem.tsx @@ -1,16 +1,18 @@ -import React from 'react'; +import React, { useState } from 'react'; import styled from 'styled-components'; -import { Button, List, message, Modal, Tag, Tooltip, Typography } from 'antd'; +import { Button, Dropdown, List, Menu, message, Modal, Tag, Tooltip, Typography } from 'antd'; import { Link } from 'react-router-dom'; -import { DeleteOutlined } from '@ant-design/icons'; +import { DeleteOutlined, MoreOutlined, UnlockOutlined } from '@ant-design/icons'; import { CorpUser, CorpUserStatus, EntityType } from '../../../types.generated'; import CustomAvatar from '../../shared/avatar/CustomAvatar'; import { useEntityRegistry } from '../../useEntityRegistry'; import { useRemoveUserMutation } from '../../../graphql/user.generated'; import { ANTD_GRAY, REDESIGN_COLORS } from '../../entity/shared/constants'; +import ViewResetTokenModal from './ViewResetTokenModal'; type Props = { user: CorpUser; + canManageUserCredentials: boolean; onDelete?: () => void; }; @@ -34,9 +36,12 @@ const ButtonGroup = styled.div` align-items: center; `; -export default function UserListItem({ user, onDelete }: Props) { +export default function UserListItem({ user, canManageUserCredentials, onDelete }: Props) { const entityRegistry = useEntityRegistry(); + const [isViewingResetToken, setIsViewingResetToken] = useState(false); const displayName = entityRegistry.getDisplayName(EntityType.CorpUser, user); + const isNativeUser: boolean = user.isNativeUser as boolean; + const shouldShowPasswordReset: boolean = canManageUserCredentials && isNativeUser; const [removeUserMutation] = useRemoveUserMutation(); @@ -118,10 +123,28 @@ export default function UserListItem({ user, onDelete }: Props) { + + setIsViewingResetToken(true)}> +   Reset user password + + + } + > + + + setIsViewingResetToken(false)} + /> ); } diff --git a/datahub-web-react/src/app/identity/user/ViewInviteTokenModal.tsx b/datahub-web-react/src/app/identity/user/ViewInviteTokenModal.tsx new file mode 100644 index 00000000000000..a353af4677f026 --- /dev/null +++ b/datahub-web-react/src/app/identity/user/ViewInviteTokenModal.tsx @@ -0,0 +1,88 @@ +import { RedoOutlined } from '@ant-design/icons'; +import { Button, Modal, Typography } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import { PageRoutes } from '../../../conf/Global'; +import { + useCreateNativeUserInviteTokenMutation, + useGetNativeUserInviteTokenQuery, +} from '../../../graphql/user.generated'; + +const ModalSection = styled.div` + display: flex; + flex-direction: column; + padding-bottom: 12px; +`; + +const ModalSectionHeader = styled(Typography.Text)` + &&&& { + padding: 0px; + margin: 0px; + margin-bottom: 4px; + } +`; + +const ModalSectionParagraph = styled(Typography.Paragraph)` + &&&& { + padding: 0px; + margin: 0px; + } +`; + +const CreateInviteTokenButton = styled(Button)` + display: inline-block; + width: 20px; + margin-left: -6px; +`; + +type Props = { + visible: boolean; + onClose: () => void; +}; + +export default function ViewInviteTokenModal({ visible, onClose }: Props) { + const baseUrl = window.location.origin; + const { data: getNativeUserInviteTokenData } = useGetNativeUserInviteTokenQuery({}); + + const [createNativeUserInviteToken, 
{ data: createNativeUserInviteTokenData }] = + useCreateNativeUserInviteTokenMutation({}); + + const inviteToken = createNativeUserInviteTokenData?.createNativeUserInviteToken?.inviteToken + ? createNativeUserInviteTokenData?.createNativeUserInviteToken.inviteToken + : getNativeUserInviteTokenData?.getNativeUserInviteToken?.inviteToken || ''; + + const inviteLink = `${baseUrl}${PageRoutes.SIGN_UP}?invite_token=${inviteToken}`; + + return ( + + Invite new DataHub users +
+ } + visible={visible} + onCancel={onClose} + > + + Share invite link + + Share this invite link with other users in your workspace! + + +
{inviteLink}
+
+
+ + Generate a new link + + Generate a new invite link! Note, any old links will cease to be active. + + createNativeUserInviteToken({})} size="small" type="text"> + + + + + ); +} diff --git a/datahub-web-react/src/app/identity/user/ViewResetTokenModal.tsx b/datahub-web-react/src/app/identity/user/ViewResetTokenModal.tsx new file mode 100644 index 00000000000000..e700a4d002d8d8 --- /dev/null +++ b/datahub-web-react/src/app/identity/user/ViewResetTokenModal.tsx @@ -0,0 +1,108 @@ +import { RedoOutlined } from '@ant-design/icons'; +import { Button, Modal, Typography } from 'antd'; +import React, { useState } from 'react'; +import styled from 'styled-components'; +import { PageRoutes } from '../../../conf/Global'; +import { useCreateNativeUserResetTokenMutation } from '../../../graphql/user.generated'; + +const ModalSection = styled.div` + display: flex; + flex-direction: column; + padding-bottom: 12px; +`; + +const ModalSectionHeader = styled(Typography.Text)` + &&&& { + padding: 0px; + margin: 0px; + margin-bottom: 4px; + } +`; + +const ModalSectionParagraph = styled(Typography.Paragraph)` + &&&& { + padding: 0px; + margin: 0px; + } +`; + +const CreateResetTokenButton = styled(Button)` + display: inline-block; + width: 20px; + margin-left: -6px; +`; + +type Props = { + visible: boolean; + userUrn: string; + username: string; + onClose: () => void; +}; + +export default function ViewResetTokenModal({ visible, userUrn, username, onClose }: Props) { + const baseUrl = window.location.origin; + const [hasGeneratedResetToken, setHasGeneratedResetToken] = useState(false); + + const [createNativeUserResetToken, { data: createNativeUserResetTokenData }] = + useCreateNativeUserResetTokenMutation({}); + + const resetToken = createNativeUserResetTokenData?.createNativeUserResetToken?.resetToken || ''; + + const inviteLink = `${baseUrl}${PageRoutes.RESET_CREDENTIALS}?reset_token=${resetToken}`; + + return ( + + Reset User Password + + } + visible={visible} + onCancel={onClose} + > + {hasGeneratedResetToken ? ( + + Share reset link + + Share this reset link to reset the credentials for {username}. + This link will expire in 24 hours. + + +
{inviteLink}
+
+
+ ) : ( + + A new link must be generated + + You cannot view any old reset links. Please generate a new one below. + + + )} + + Generate a new link + + Generate a new reset link! Note, any old links will cease to be active. + + { + createNativeUserResetToken({ + variables: { + input: { + userUrn, + }, + }, + }); + setHasGeneratedResetToken(true); + }} + size="small" + type="text" + > + + + +
+ ); +} diff --git a/datahub-web-react/src/app/policy/ManagePolicies.tsx b/datahub-web-react/src/app/policy/ManagePolicies.tsx index f7522bce77172f..26a4eab51b7c84 100644 --- a/datahub-web-react/src/app/policy/ManagePolicies.tsx +++ b/datahub-web-react/src/app/policy/ManagePolicies.tsx @@ -248,12 +248,20 @@ export const ManagePolicies = () => { setFocusPolicyUrn(undefined); }; + const onEditPolicy = (policy: Policy) => { + setShowPolicyBuilderModal(true); + setFocusPolicyUrn(policy?.urn); + setFocusPolicy({ ...policy }); + }; + + // On Delete Policy handler const onRemovePolicy = (policy: Policy) => { Modal.confirm({ title: `Delete ${policy?.name}`, content: `Are you sure you want to remove policy?`, onOk() { deletePolicy({ variables: { urn: policy?.urn as string } }); // There must be a focus policy urn. + message.success('Successfully removed policy.'); setTimeout(function () { policiesRefetch(); }, 3000); @@ -266,16 +274,12 @@ export const ManagePolicies = () => { }); }; - const onEditPolicy = (policy: Policy) => { - setShowPolicyBuilderModal(true); - setFocusPolicyUrn(policy?.urn); - setFocusPolicy({ ...policy }); - }; - + // On Activate and deactivate Policy handler const onToggleActiveDuplicate = (policy: Policy) => { + const newState = policy?.state === PolicyState.Active ? PolicyState.Inactive : PolicyState.Active; const newPolicy = { ...policy, - state: policy?.state === PolicyState.Active ? PolicyState.Inactive : PolicyState.Active, + state: newState, }; updatePolicy({ variables: { @@ -283,12 +287,14 @@ export const ManagePolicies = () => { input: toPolicyInput(newPolicy), }, }); + message.success(`Successfully ${newState === PolicyState.Active ? 'activated' : 'deactivated'} policy.`); setTimeout(function () { policiesRefetch(); }, 3000); setShowViewPolicyModal(false); }; + // On Add/Update Policy handler const onSavePolicy = (savePolicy: Omit) => { if (focusPolicyUrn) { // If there's an URN associated with the focused policy, then we are editing an existing policy. @@ -351,6 +357,7 @@ export const ManagePolicies = () => { /> {record?.allUsers ? All Users : null} {record?.allGroups ? All Groups : null} + {record?.resourceOwners ? 
All Owners : null} ); }, @@ -407,6 +414,7 @@ export const ManagePolicies = () => { const tableData = policies?.map((policy) => ({ allGroups: policy?.actors?.allGroups, allUsers: policy?.actors?.allUsers, + resourceOwners: policy?.actors?.resourceOwners, description: policy?.description, editable: policy?.editable, name: policy?.name, diff --git a/datahub-web-react/src/app/settings/AccessTokens.tsx b/datahub-web-react/src/app/settings/AccessTokens.tsx index f410ce37cc17a3..d31f5f1680f0f2 100644 --- a/datahub-web-react/src/app/settings/AccessTokens.tsx +++ b/datahub-web-react/src/app/settings/AccessTokens.tsx @@ -98,6 +98,7 @@ export const AccessTokens = () => { data: tokensData, refetch: tokensRefetch, } = useListAccessTokensQuery({ + skip: !canGeneratePersonalAccessTokens, variables: { input: { start, diff --git a/datahub-web-react/src/conf/Global.ts b/datahub-web-react/src/conf/Global.ts index 0489c20c4e1cdd..98bfe4b2e597e8 100644 --- a/datahub-web-react/src/conf/Global.ts +++ b/datahub-web-react/src/conf/Global.ts @@ -5,8 +5,11 @@ export enum PageRoutes { /** * Server-side authentication route */ + ROOT = '/', AUTHENTICATE = '/authenticate', + SIGN_UP = '/signup', LOG_IN = '/login', + RESET_CREDENTIALS = '/reset', SEARCH_RESULTS = '/search/:type?', SEARCH = '/search', BROWSE = '/browse', diff --git a/datahub-web-react/src/graphql/glossaryTerm.graphql b/datahub-web-react/src/graphql/glossaryTerm.graphql index 55e82e85fe1419..1a6da8b0e00924 100644 --- a/datahub-web-react/src/graphql/glossaryTerm.graphql +++ b/datahub-web-react/src/graphql/glossaryTerm.graphql @@ -78,3 +78,11 @@ mutation createGlossaryTerm($input: CreateGlossaryEntityInput!) { mutation createGlossaryNode($input: CreateGlossaryEntityInput!) { createGlossaryNode(input: $input) } + +mutation addRelatedTerms($input: RelatedTermsInput!) { + addRelatedTerms(input: $input) +} + +mutation removeRelatedTerms($input: RelatedTermsInput!) { + removeRelatedTerms(input: $input) +} diff --git a/datahub-web-react/src/graphql/me.graphql b/datahub-web-react/src/graphql/me.graphql index ce787df78a6010..7f1d426f3d6a7c 100644 --- a/datahub-web-react/src/graphql/me.graphql +++ b/datahub-web-react/src/graphql/me.graphql @@ -30,6 +30,7 @@ query getMe { manageDomains manageTests manageGlossaries + manageUserCredentials } } } diff --git a/datahub-web-react/src/graphql/user.graphql b/datahub-web-react/src/graphql/user.graphql index 9cc318152dc2b2..57d3d3538b4e88 100644 --- a/datahub-web-react/src/graphql/user.graphql +++ b/datahub-web-react/src/graphql/user.graphql @@ -2,6 +2,7 @@ query getUser($urn: String!, $groupsCount: Int!) { corpUser(urn: $urn) { urn username + isNativeUser info { active displayName @@ -90,6 +91,7 @@ query listUsers($input: ListUsersInput!) { users { urn username + isNativeUser info { active displayName @@ -124,3 +126,22 @@ mutation updateCorpUserProperties($urn: String!, $input: CorpUserUpdateInput!) { urn } } + +mutation createNativeUserInviteToken { + createNativeUserInviteToken { + inviteToken + } +} + +query getNativeUserInviteToken { + getNativeUserInviteToken { + inviteToken + } +} + +mutation createNativeUserResetToken($input: CreateNativeUserResetTokenInput!) 
{ + createNativeUserResetToken(input: $input) { + resetToken + } } + diff --git a/docs/how/auth/add-users.md b/docs/how/auth/add-users.md index 3804b517e1dc76..368e3cb6b600a7 100644 --- a/docs/how/auth/add-users.md +++ b/docs/how/auth/add-users.md @@ -1,14 +1,49 @@ # Adding Users to DataHub -Users can log into DataHub in 2 ways: +Users can log into DataHub in 3 ways: -1. Static credentials (Simplest) -2. Single Sign-On via [OpenID Connect](https://www.google.com/search?q=openid+connect&oq=openid+connect&aqs=chrome.0.0i131i433i512j0i512l4j69i60l2j69i61.1468j0j7&sourceid=chrome&ie=UTF-8) (For Production Use) +1. Invite users via the UI +2. Static credentials +3. Single Sign-On via [OpenID Connect](https://www.google.com/search?q=openid+connect&oq=openid+connect&aqs=chrome.0.0i131i433i512j0i512l4j69i60l2j69i61.1468j0j7&sourceid=chrome&ie=UTF-8) (For Production Use) -which can be both enabled simultaneously. Option 1 is useful for running proof-of-concept exercises, or just getting DataHub up & running quickly. Option 2 is highly recommended for deploying DataHub in production. +which can be enabled simultaneously. Options 1 and 2 are useful for running proof-of-concept exercises, or just getting DataHub up & running quickly. Option 3 is highly recommended for deploying DataHub in production. +# Method 1: Inviting users via the DataHub UI -# Method 1: Configuring static credentials +## Send prospective users an invite link + +With the right permissions (`MANAGE_USER_CREDENTIALS`), you can invite new users to your deployed DataHub instance from the UI. It's as simple as sending a link! + +First, navigate to the Users and Groups tab (under Access) on the Settings page. You'll then see an `Invite Users` button. Note that this will only be clickable +if you have the correct permissions. + +![](../../imgs/invite-users-button.png) + +If you click on this button, you'll see a pop-up where you can copy an invite link to send to users, or generate a fresh one. + +![](../../imgs/invite-users-popup.png) + +When a new user visits the link, they will be directed to a sign up screen. Note that if a new link has since been regenerated, the new user won't be able to sign up! + +![](../../imgs/user-sign-up-screen.png) + +## Reset password for native users + +If a user forgets their password, an admin user with the `MANAGE_USER_CREDENTIALS` privilege can go to the Users and Groups tab and click on the respective user's +`Reset user password` button. + +![](../../imgs/reset-user-password-button.png) + +Similar to the invite link, you can generate a new reset link and send it to that user, who can use it to reset their credentials. + +![](../../imgs/reset-user-password-popup.png) + +When that user visits the link, they will be directed to a screen where they can reset their credentials. If the link is older than 24 hours or another link has since +been generated, they won't be able to reset their credentials! + +![](../../imgs/reset-credentials-screen.png) + +# Method 2: Configuring static credentials ## Create a user.props file @@ -134,8 +169,7 @@ and modify the `datahub-frontend-react` block to contain the extra volume mount.
datahub docker quickstart --quickstart-compose-file .yml ``` - -# Method 2: Configuring SSO via OpenID Connect +# Method 3: Configuring SSO via OpenID Connect Setting up SSO via OpenID Connect means that users will be able to log in to DataHub via a central Identity Provider such as diff --git a/docs/imgs/invite-users-button.png b/docs/imgs/invite-users-button.png new file mode 100644 index 00000000000000..a5d07a1c1e7e75 Binary files /dev/null and b/docs/imgs/invite-users-button.png differ diff --git a/docs/imgs/invite-users-popup.png b/docs/imgs/invite-users-popup.png new file mode 100644 index 00000000000000..621b1521eae752 Binary files /dev/null and b/docs/imgs/invite-users-popup.png differ diff --git a/docs/imgs/reset-credentials-screen.png b/docs/imgs/reset-credentials-screen.png new file mode 100644 index 00000000000000..4b680837b77ab1 Binary files /dev/null and b/docs/imgs/reset-credentials-screen.png differ diff --git a/docs/imgs/reset-user-password-button.png b/docs/imgs/reset-user-password-button.png new file mode 100644 index 00000000000000..5b1f3ee153d072 Binary files /dev/null and b/docs/imgs/reset-user-password-button.png differ diff --git a/docs/imgs/reset-user-password-popup.png b/docs/imgs/reset-user-password-popup.png new file mode 100644 index 00000000000000..ac2456dde4d4d3 Binary files /dev/null and b/docs/imgs/reset-user-password-popup.png differ diff --git a/docs/imgs/user-sign-up-screen.png b/docs/imgs/user-sign-up-screen.png new file mode 100644 index 00000000000000..88c2589203bd18 Binary files /dev/null and b/docs/imgs/user-sign-up-screen.png differ diff --git a/metadata-ingestion/src/datahub/ingestion/run/pipeline.py b/metadata-ingestion/src/datahub/ingestion/run/pipeline.py index e26470f823a6c7..df9c7cd9856526 100644 --- a/metadata-ingestion/src/datahub/ingestion/run/pipeline.py +++ b/metadata-ingestion/src/datahub/ingestion/run/pipeline.py @@ -324,6 +324,12 @@ def log_ingestion_stats(self) -> None: self.ctx.graph, ) + def _count_all_vals(self, d: Dict[str, List]) -> int: + result = 0 + for val in d.values(): + result += len(val) + return result + def pretty_print_summary(self, warnings_as_failure: bool = False) -> int: click.echo() click.secho(f"Source ({self.config.source.type}) report:", bold=True) @@ -331,12 +337,29 @@ def pretty_print_summary(self, warnings_as_failure: bool = False) -> int: click.secho(f"Sink ({self.config.sink.type}) report:", bold=True) click.echo(self.sink.get_report().as_string()) click.echo() + workunits_produced = self.source.get_report().workunits_produced if self.source.get_report().failures or self.sink.get_report().failures: - click.secho("Pipeline finished with failures", fg="bright_red", bold=True) + num_failures_source = self._count_all_vals( + self.source.get_report().failures + ) + click.secho( + f"Pipeline finished with {num_failures_source} failures in source producing {workunits_produced} workunits", + fg="bright_red", + bold=True, + ) return 1 elif self.source.get_report().warnings or self.sink.get_report().warnings: - click.secho("Pipeline finished with warnings", fg="yellow", bold=True) + num_warn_source = self._count_all_vals(self.source.get_report().warnings) + click.secho( + f"Pipeline finished with {num_warn_source} warnings in source producing {workunits_produced} workunits", + fg="yellow", + bold=True, + ) return 1 if warnings_as_failure else 0 else: - click.secho("Pipeline finished successfully", fg="green", bold=True) + click.secho( + f"Pipeline finished successfully producing {workunits_produced} workunits", +
fg="green", + bold=True, + ) return 0 diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt.py b/metadata-ingestion/src/datahub/ingestion/source/dbt.py index 04810603f81316..8ec0686fbcc360 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt.py @@ -216,8 +216,9 @@ def aws_connection_needed_if_s3_uris_present( uri_containing_fields = [ f for f in ["manifest_path", "catalog_path", "sources_path"] - if values.get(f, "").startswith("s3://") + if (values.get(f) or "").startswith("s3://") ] + if uri_containing_fields and not aws_connection: raise ValueError( f"Please provide aws_connection configuration, since s3 uris have been provided in fields {uri_containing_fields}" @@ -431,7 +432,7 @@ def get_urn_from_dbtNode( data_platform_instance: Optional[str], ) -> str: db_fqn = get_db_fqn(database, schema, name) - if data_platform_instance is not None: + if data_platform_instance is not None and target_platform == DBT_PLATFORM: db_fqn = f"{data_platform_instance}.{db_fqn}" return mce_builder.make_dataset_urn(target_platform, db_fqn, env) @@ -508,7 +509,7 @@ def get_upstreams( name, platform_value, environment, - platform_instance if platform_value == DBT_PLATFORM else None, + platform_instance, ) ) return upstream_urns @@ -921,7 +922,7 @@ def create_platform_mces( node.name, mce_platform, self.config.env, - self.config.platform_instance if mce_platform == DBT_PLATFORM else None, + self.config.platform_instance, ) self.save_checkpoint(node_datahub_urn) @@ -1261,7 +1262,7 @@ def _create_lineage_aspect_for_dbt_node( self.config.target_platform, self.config.env, self.config.disable_dbt_node_creation, - None, + self.config.platform_instance, ) # if a node is of type source in dbt, its upstream lineage should have the corresponding table/view @@ -1274,7 +1275,7 @@ def _create_lineage_aspect_for_dbt_node( node.name, self.config.target_platform, self.config.env, - None, + self.config.platform_instance, ) ) if upstream_urns: diff --git a/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg.py b/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg.py index d3bcc0222edf8d..ee2d7358909b4f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg.py +++ b/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg.py @@ -313,16 +313,30 @@ def _parse_datatype(type: IcebergTypes.Type, nullable: bool = False) -> Dict[str "_nullable": nullable, } elif type.is_map_type(): + # The Iceberg Map type will be handled differently. The idea is to translate the map + # similar to the Map.Entry struct of Java i.e. as an array of map_entry struct, where + # the map_entry struct has a key field and a value field. The key and value type can + # be complex or primitive types. 
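+ # As an illustrative sketch only (exact primitive encodings omitted): an Iceberg map<string, int> would come out roughly as + # {"type": "array", "items": {"type": "record", "name": "__map_entry_<uuid>", "fields": [{"name": "key", "type": <schema for string>}, + # {"name": "value", "type": <nullable schema for int>}]}}, i.e. the map is exposed to consumers as a list of key/value records.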
map_type: IcebergTypes.MapType = type - kt = _parse_datatype(map_type.key_type()) - vt = _parse_datatype(map_type.value_type()) - # keys are assumed to be strings in avro map + map_entry: Dict[str, Any] = { + "type": "record", + "name": _gen_name("__map_entry_"), + "fields": [ + { + "name": "key", + "type": _parse_datatype(map_type.key_type(), False), + }, + { + "name": "value", + "type": _parse_datatype(map_type.value_type(), True), + }, + ], + } return { - "type": "map", - "values": vt, - "native_data_type": str(map_type), - "key_type": kt, - "key_native_data_type": repr(map_type.key_type()), + "type": "array", + "items": map_entry, + "native_data_type": str(type), + "_nullable": nullable, } elif type.is_struct_type(): structType: IcebergTypes.StructType = type @@ -340,7 +354,7 @@ def _parse_struct_fields(parts: Tuple[NestedField], nullable: bool) -> Dict[str, fields.append({"name": field_name, "type": field_type, "doc": nested_field.doc}) return { "type": "record", - "name": "__struct_{}".format(str(uuid.uuid4()).replace("-", "")), + "name": _gen_name("__struct_"), "fields": fields, "native_data_type": "struct<{}>".format(parts), "_nullable": nullable, @@ -367,7 +381,7 @@ def _parse_basic_datatype( fixed_type: IcebergTypes.FixedType = type return { "type": "fixed", - "name": "name", # TODO: Pass-in field name since it is required by Avro spec + "name": _gen_name("__fixed_"), "size": fixed_type.length, "native_data_type": repr(fixed_type), "_nullable": nullable, @@ -380,7 +394,9 @@ def _parse_basic_datatype( return { # "type": "bytes", # when using bytes, avro drops _nullable attribute and others. See unit test. "type": "fixed", # to fix avro bug ^ resolved by using a fixed type - "name": "bogus", # to fix avro bug ^ resolved by using a fixed type + "name": _gen_name( + "__fixed_" + ), # to fix avro bug ^ resolved by using a fixed type "size": 1, # to fix avro bug ^ resolved by using a fixed type "logicalType": "decimal", "precision": decimal_type.precision, @@ -431,3 +447,7 @@ def _parse_basic_datatype( } return {"type": "null", "native_data_type": repr(type)} + + +def _gen_name(prefix: str) -> str: + return f"{prefix}{str(uuid.uuid4()).replace('-', '')}" diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker_common.py b/metadata-ingestion/src/datahub/ingestion/source/looker_common.py index 5d20f6c96cb023..0c8c496a9041c0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker_common.py @@ -543,7 +543,14 @@ def from_api( # noqa: C901 views.add(explore.name) if explore.joins is not None and explore.joins != []: - potential_views = [e.name for e in explore.joins if e.name is not None] + join_to_orig_name_map = {} + potential_views = [] + for e in explore.joins: + if e.from_ is not None: + potential_views.append(e.from_) + join_to_orig_name_map[e.name] = e.from_ + elif e.name is not None: + potential_views.append(e.name) for e_join in [ e for e in explore.joins if e.dependent_fields is not None ]: @@ -551,6 +558,9 @@ def from_api( # noqa: C901 for field_name in e_join.dependent_fields: try: view_name = LookerUtil._extract_view_from_field(field_name) + orig_name = join_to_orig_name_map.get(e_join.name) + if orig_name is not None: + view_name = orig_name potential_views.append(view_name) except AssertionError: reporter.report_warning( diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/bigquery.py b/metadata-ingestion/src/datahub/ingestion/source/sql/bigquery.py index 
a9d6e2ee413523..4a6ca80a5cd9ee 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/bigquery.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/bigquery.py @@ -646,6 +646,20 @@ def _create_lineage_map(self, entries: Iterable[QueryEvent]) -> Dict[str, Set[st self.report.num_skipped_lineage_entries_other += 1 return lineage_map + def is_table_partitioned( + self, database: Optional[str], schema: str, table: str + ) -> bool: + project_id: str + if database: + project_id = database + else: + url = self.config.get_sql_alchemy_url() + engine = create_engine(url, **self.config.options) + with engine.connect() as con: + inspector = inspect(con) + project_id = self.get_db_name(inspector) + return f"{project_id}.{schema}.{table}" in self.partition_info + def get_latest_partition( self, schema: str, table: str ) -> Optional[BigQueryPartitionColumn]: @@ -653,8 +667,13 @@ def get_latest_partition( engine = create_engine(url, **self.config.options) with engine.connect() as con: inspector = inspect(con) + project_id = self.get_db_name(inspector) + if not self.is_table_partitioned( + database=project_id, schema=schema, table=table + ): + return None sql = BQ_GET_LATEST_PARTITION_TEMPLATE.format( - project_id=self.get_db_name(inspector), schema=schema, table=table + project_id=project_id, schema=schema, table=table ) result = con.execute(sql) # Bigquery only supports one partition column diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py index 1ea4270b07d981..e692fc0d6d6299 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py @@ -102,6 +102,8 @@ logger: logging.Logger = logging.getLogger(__name__) +MISSING_COLUMN_INFO = "missing column information" + def _platform_alchemy_uri_tester_gen( platform: str, opt_starts_with: Optional[str] = None @@ -1040,7 +1042,7 @@ def _get_columns( try: columns = inspector.get_columns(table, schema) if len(columns) == 0: - self.report.report_warning(dataset_name, "missing column information") + self.report.report_warning(MISSING_COLUMN_INFO, dataset_name) except Exception as e: self.report.report_warning( dataset_name, @@ -1301,6 +1303,11 @@ def generate_partition_profiler_query( ) -> Tuple[Optional[str], Optional[str]]: return None, None + def is_table_partitioned( + self, database: Optional[str], schema: str, table: str + ) -> Optional[bool]: + return None + # Override if you want to do additional checks def is_dataset_eligible_for_profiling( self, dataset_name: str, sql_config: SQLAlchemyConfig @@ -1339,10 +1346,25 @@ def loop_profiler_requests( logger.debug(f"{dataset_name} has already been seen, skipping...") continue + missing_column_info_warn = self.report.warnings.get(MISSING_COLUMN_INFO) + if ( + missing_column_info_warn is not None + and dataset_name in missing_column_info_warn + ): + continue + (partition, custom_sql) = self.generate_partition_profiler_query( schema, table, self.config.profiling.partition_datetime ) + if partition is None and self.is_table_partitioned( + database=None, schema=schema, table=table + ): + self.report.report_warning( + "profile skipped as partitioned table empty", dataset_name + ) + continue + if ( partition is not None and not self.config.profiling.partition_profiling_enabled diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py b/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py 
index ddebc6a437ea4b..941cd9a9ab8934 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau_common.py @@ -412,6 +412,9 @@ def make_table_urn( platform = "teradata" elif connection_type in ("sqlserver"): platform = "mssql" + elif connection_type in ("athena"): + platform = "athena" + upstream_db = "" else: platform = connection_type diff --git a/metadata-ingestion/src/datahub/ingestion/source/usage/bigquery_usage.py b/metadata-ingestion/src/datahub/ingestion/source/usage/bigquery_usage.py index 7f14f5e5c2acf8..de673b3a2dcd6c 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/usage/bigquery_usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/usage/bigquery_usage.py @@ -926,7 +926,11 @@ def _get_bigquery_log_entries_via_gcp_logging( def _create_operation_aspect_work_unit( self, event: QueryEvent ) -> Optional[MetadataWorkUnit]: - if event.statementType in OPERATION_STATEMENT_TYPES and event.destinationTable: + if ( + event.statementType in OPERATION_STATEMENT_TYPES + and event.destinationTable + and self._is_table_allowed(event.destinationTable) + ): destination_table: BigQueryTableRef try: destination_table = event.destinationTable.remove_extras() @@ -992,14 +996,16 @@ def _parse_bigquery_log_entries( if not self._is_table_allowed(event.resource): self.report.num_filtered_read_events += 1 continue + + if event.readReason: + self.report.read_reasons_stat[event.readReason] = ( + self.report.read_reasons_stat.get(event.readReason, 0) + 1 + ) self.report.num_read_events += 1 missing_query_entry = QueryEvent.get_missing_key_entry(entry) if event is None and missing_query_entry is None: event = QueryEvent.from_entry(entry) - if not self._is_table_allowed(event.destinationTable): - self.report.num_filtered_query_events += 1 - continue self.report.num_query_events += 1 wu = self._create_operation_aspect_work_unit(event) if wu: @@ -1009,9 +1015,6 @@ def _parse_bigquery_log_entries( if event is None and missing_query_entry_v2 is None: event = QueryEvent.from_entry_v2(entry) - if not self._is_table_allowed(event.destinationTable): - self.report.num_filtered_query_events += 1 - continue self.report.num_query_events += 1 wu = self._create_operation_aspect_work_unit(event) if wu: diff --git a/metadata-ingestion/src/datahub/ingestion/source_report/usage/bigquery_usage.py b/metadata-ingestion/src/datahub/ingestion/source_report/usage/bigquery_usage.py index 89cb27fb93170e..766e342da92730 100644 --- a/metadata-ingestion/src/datahub/ingestion/source_report/usage/bigquery_usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source_report/usage/bigquery_usage.py @@ -25,6 +25,9 @@ class BigQueryUsageSourceReport(SourceReport): log_entry_end_time: Optional[str] = None num_usage_workunits_emitted: Optional[int] = None num_operational_stats_workunits_emitted: Optional[int] = None + read_reasons_stat: Counter[str] = dataclasses.field( + default_factory=collections.Counter + ) def report_dropped(self, key: str) -> None: self.dropped_table[key] += 1 diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json index 87547f1a90302c..02a40c85d1165d 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json @@ -67,7 +67,8 @@ 
"lastModified": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null } } }, @@ -88,12 +89,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -117,7 +120,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD)", "type": "TRANSFORMED" @@ -126,7 +130,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD)", "type": "TRANSFORMED" @@ -135,7 +140,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.city,PROD)", "type": "TRANSFORMED" @@ -226,12 +232,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -248,6 +256,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -266,6 +276,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -284,6 +296,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -302,6 +316,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -328,7 +344,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" @@ -337,9 +354,10 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD)", + "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD)", "type": "TRANSFORMED" } ], @@ -423,12 +441,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -445,6 +465,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -463,6 +485,8 @@ "jsonPath": null, "nullable": false, 
"description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -481,6 +505,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -499,6 +525,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -517,6 +545,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -535,6 +565,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -561,7 +593,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD)", "type": "TRANSFORMED" @@ -570,7 +603,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD)", "type": "TRANSFORMED" @@ -579,7 +613,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD)", "type": "TRANSFORMED" @@ -588,7 +623,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD)", "type": "TRANSFORMED" @@ -597,7 +633,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD)", "type": "TRANSFORMED" @@ -606,7 +643,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD)", "type": "TRANSFORMED" @@ -615,7 +653,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" @@ -701,12 +740,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -723,6 +764,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -741,6 +784,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -759,6 +804,8 @@ "jsonPath": null, "nullable": false, "description": null, 
+ "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -785,7 +832,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", "type": "TRANSFORMED" @@ -877,7 +925,8 @@ "lastModified": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null } } }, @@ -889,12 +938,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 1581759273000, "actor": "urn:li:corpuser:dbt_executor", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -911,6 +962,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -929,6 +982,8 @@ "jsonPath": null, "nullable": false, "description": "dbt comment: Actors column \u2013 from postgres\n\ndbt model description: description for first_name from dbt", + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -953,6 +1008,8 @@ "jsonPath": null, "nullable": false, "description": "description for last_name from dbt", + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -971,6 +1028,8 @@ "jsonPath": null, "nullable": false, "description": "description for last_update from dbt", + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -997,7 +1056,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.actor,PROD)", "type": "TRANSFORMED" @@ -1075,12 +1135,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 1581759930000, "actor": "urn:li:corpuser:dbt_executor", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -1097,6 +1159,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -1115,6 +1179,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -1133,6 +1199,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -1151,6 +1219,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -1169,6 +1239,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -1187,6 +1259,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { 
"com.linkedin.pegasus2avro.schema.TimeType": {} @@ -1205,6 +1279,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -1223,6 +1299,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -1249,7 +1327,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.address,PROD)", "type": "TRANSFORMED" @@ -1327,12 +1406,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 1581759987000, "actor": "urn:li:corpuser:dbt_executor", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -1349,6 +1430,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -1367,6 +1450,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -1385,6 +1470,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -1411,7 +1498,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.category,PROD)", "type": "TRANSFORMED" @@ -1489,12 +1577,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 1581759925000, "actor": "urn:li:corpuser:dbt_executor", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -1511,6 +1601,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -1529,6 +1621,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -1547,6 +1641,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -1565,6 +1661,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -1591,7 +1689,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.city,PROD)", "type": "TRANSFORMED" @@ -1676,7 +1775,8 @@ "lastModified": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null } } }, @@ -1688,12 +1788,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, 
"lastModified": { "time": 1581759840000, "actor": "urn:li:corpuser:dbt_executor", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -1710,6 +1812,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -1728,6 +1832,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -1746,6 +1852,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -1772,7 +1880,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.country,PROD)", "type": "TRANSFORMED" @@ -1850,12 +1959,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 1581760640000, "actor": "urn:li:corpuser:dbt_executor", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -1872,6 +1983,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -1890,6 +2003,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.BooleanType": {} @@ -1908,6 +2023,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -1926,6 +2043,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.DateType": {} @@ -1944,6 +2063,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -1962,6 +2083,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -1980,6 +2103,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -1998,6 +2123,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.StringType": {} @@ -2016,6 +2143,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -2034,6 +2163,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2060,7 +2191,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer,PROD)", "type": "TRANSFORMED" @@ 
-2138,12 +2270,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 1580505371996, "actor": "urn:li:corpuser:dbt_executor", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -2160,6 +2294,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2178,6 +2314,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2196,6 +2334,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -2214,6 +2354,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2232,6 +2374,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2250,6 +2394,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2276,7 +2422,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_01,PROD)", "type": "TRANSFORMED" @@ -2362,7 +2509,8 @@ "lastModified": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null } } }, @@ -2374,12 +2522,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 1582319845996, "actor": "urn:li:corpuser:dbt_executor", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -2396,6 +2546,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2414,6 +2566,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2432,6 +2586,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -2450,6 +2606,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2468,6 +2626,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2486,6 +2646,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2512,7 +2674,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + 
"message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_02,PROD)", "type": "TRANSFORMED" @@ -2590,12 +2753,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 1584998318996, "actor": "urn:li:corpuser:dbt_executor", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -2612,6 +2777,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2630,6 +2797,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2648,6 +2817,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -2666,6 +2837,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2684,6 +2857,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2702,6 +2877,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2728,7 +2905,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_03,PROD)", "type": "TRANSFORMED" @@ -2806,12 +2984,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 1588287228996, "actor": "urn:li:corpuser:dbt_executor", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -2828,6 +3008,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2846,6 +3028,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2864,6 +3048,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -2882,6 +3068,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2900,6 +3088,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2918,6 +3108,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -2944,7 +3136,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, 
"dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_04,PROD)", "type": "TRANSFORMED" @@ -3022,12 +3215,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": 1589460269996, "actor": "urn:li:corpuser:dbt_executor", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -3044,6 +3239,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -3062,6 +3259,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -3080,6 +3279,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -3098,6 +3299,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -3116,6 +3319,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -3134,6 +3339,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -3160,7 +3367,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_05,PROD)", "type": "TRANSFORMED" @@ -3238,12 +3446,14 @@ "created": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "lastModified": { "time": -62135596800000, "actor": "urn:li:corpuser:dbt_executor", - "impersonator": null + "impersonator": null, + "message": null }, "deleted": null, "dataset": null, @@ -3260,6 +3470,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -3278,6 +3490,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -3296,6 +3510,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.TimeType": {} @@ -3314,6 +3530,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -3332,6 +3550,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -3350,6 +3570,8 @@ "jsonPath": null, "nullable": false, "description": null, + "created": null, + "lastModified": null, "type": { "type": { "com.linkedin.pegasus2avro.schema.NumberType": {} @@ -3376,7 +3598,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": 
"urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" @@ -3410,7 +3633,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD)", "type": "TRANSFORMED" @@ -3444,7 +3668,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", "type": "TRANSFORMED" @@ -3478,7 +3703,8 @@ "auditStamp": { "time": 0, "actor": "urn:li:corpuser:unknown", - "impersonator": null + "impersonator": null, + "message": null }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" diff --git a/metadata-ingestion/tests/integration/looker/golden_test_ingest_joins.json b/metadata-ingestion/tests/integration/looker/golden_test_ingest_joins.json index 5a6204407feb40..5e8af476a5028c 100644 --- a/metadata-ingestion/tests/integration/looker/golden_test_ingest_joins.json +++ b/metadata-ingestion/tests/integration/looker/golden_test_ingest_joins.json @@ -113,6 +113,15 @@ }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.underlying_view,PROD)", "type": "VIEW" + }, + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": null + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.my_joined_view_original_name,PROD)", + "type": "VIEW" } ], "fineGrainedLineages": null diff --git a/metadata-ingestion/tests/integration/looker/golden_test_ingest_unaliased_joins.json b/metadata-ingestion/tests/integration/looker/golden_test_ingest_unaliased_joins.json index 3a835d95845168..ab0ee04a08a650 100644 --- a/metadata-ingestion/tests/integration/looker/golden_test_ingest_unaliased_joins.json +++ b/metadata-ingestion/tests/integration/looker/golden_test_ingest_unaliased_joins.json @@ -103,6 +103,15 @@ }, "dataset": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.my_view_has_no_fields,PROD)", "type": "VIEW" + }, + { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown", + "impersonator": null + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:looker,lkml_samples.view.my_joined_view_original_name,PROD)", + "type": "VIEW" } ], "fineGrainedLineages": null diff --git a/metadata-ingestion/tests/integration/looker/test_looker.py b/metadata-ingestion/tests/integration/looker/test_looker.py index c89c7a12211bf1..23242718417afb 100644 --- a/metadata-ingestion/tests/integration/looker/test_looker.py +++ b/metadata-ingestion/tests/integration/looker/test_looker.py @@ -222,6 +222,10 @@ def setup_mock_explore_with_joins(mocked_client): relationship="one_to_one", sql_on="1=1", ), + LookmlModelExploreJoins( + name="my_joined_view_join_name", + from_="my_joined_view_original_name", + ), ], ) @@ -249,7 +253,11 @@ def setup_mock_explore_unaliased_with_joins(mocked_client): view_label="My Labeled View", relationship="one_to_one", sql_on="1=1", - ) + ), + LookmlModelExploreJoins( + name="my_joined_view_join_name", + from_="my_joined_view_original_name", + ), ], ) diff --git a/metadata-ingestion/tests/unit/test_iceberg.py b/metadata-ingestion/tests/unit/test_iceberg.py index 949f6d10cc0577..a6aeb1919c6082 
100644 --- a/metadata-ingestion/tests/unit/test_iceberg.py +++ b/metadata-ingestion/tests/unit/test_iceberg.py @@ -4,7 +4,7 @@ if sys.version_info < (3, 7): pytest.skip("iceberg not available for python < 3.7", allow_module_level=True) -from typing import Any +from typing import Any, Optional from iceberg.api import types as IcebergTypes from iceberg.api.types.types import NestedField @@ -13,18 +13,13 @@ from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.source.azure.azure_common import AdlsSourceConfig from datahub.ingestion.source.iceberg.iceberg import IcebergSource, IcebergSourceConfig -from datahub.metadata.com.linkedin.pegasus2avro.schema import ( - ArrayType, - MapType, - SchemaField, -) +from datahub.metadata.com.linkedin.pegasus2avro.schema import ArrayType, SchemaField from datahub.metadata.schema_classes import ( ArrayTypeClass, BooleanTypeClass, BytesTypeClass, DateTypeClass, FixedTypeClass, - MapTypeClass, NumberTypeClass, RecordTypeClass, StringTypeClass, @@ -44,7 +39,7 @@ def with_iceberg_source() -> IcebergSource: def assert_field( schema_field: SchemaField, - expected_description: str, + expected_description: Optional[str], expected_nullable: bool, expected_type: Any, ) -> None: @@ -267,64 +262,62 @@ def test_iceberg_list_to_schema_field( @pytest.mark.parametrize( - "iceberg_type, expected_map_value_type", + "iceberg_type, expected_map_type", [ - (IcebergTypes.BinaryType.get(), "bytes"), - (IcebergTypes.BooleanType.get(), "boolean"), - (IcebergTypes.DateType.get(), "date"), + (IcebergTypes.BinaryType.get(), BytesTypeClass), + (IcebergTypes.BooleanType.get(), BooleanTypeClass), + (IcebergTypes.DateType.get(), DateTypeClass), ( IcebergTypes.DecimalType.of(3, 2), - "decimal", + NumberTypeClass, ), - (IcebergTypes.DoubleType.get(), "double"), - (IcebergTypes.FixedType.of_length(4), "fixed"), - (IcebergTypes.FloatType.get(), "float"), - (IcebergTypes.IntegerType.get(), "int"), - (IcebergTypes.LongType.get(), "long"), - (IcebergTypes.StringType.get(), "string"), + (IcebergTypes.DoubleType.get(), NumberTypeClass), + (IcebergTypes.FixedType.of_length(4), FixedTypeClass), + (IcebergTypes.FloatType.get(), NumberTypeClass), + (IcebergTypes.IntegerType.get(), NumberTypeClass), + (IcebergTypes.LongType.get(), NumberTypeClass), + (IcebergTypes.StringType.get(), StringTypeClass), ( IcebergTypes.TimestampType.with_timezone(), - "timestamp-micros", + TimeTypeClass, ), ( IcebergTypes.TimestampType.without_timezone(), - "timestamp-micros", + TimeTypeClass, ), - (IcebergTypes.TimeType.get(), "time-micros"), + (IcebergTypes.TimeType.get(), TimeTypeClass), ( IcebergTypes.UUIDType.get(), - "uuid", + StringTypeClass, ), ], ) def test_iceberg_map_to_schema_field( - iceberg_type: IcebergTypes.PrimitiveType, expected_map_value_type: Any + iceberg_type: IcebergTypes.PrimitiveType, expected_map_type: Any ) -> None: """ - Test converting a map typed Iceberg field to a MapType SchemaField, including the map value type. + Test converting a map typed Iceberg field to a MapType SchemaField, where the key is the same type as the value. 
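+ Iceberg maps are converted into an array of (key, value) struct records, so three SchemaFields are expected: the array itself, the key field and the value field.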
""" map_column: NestedField = NestedField.required( 1, "mapField", - IcebergTypes.MapType.of_required( - 11, 12, IcebergTypes.StringType.get(), iceberg_type - ), + IcebergTypes.MapType.of_required(11, 12, iceberg_type, iceberg_type), "documentation", ) iceberg_source_instance = with_iceberg_source() schema_fields = iceberg_source_instance._get_schema_fields_for_column(map_column) - assert len(schema_fields) == 1, f"Expected 1 field, but got {len(schema_fields)}" - assert_field(schema_fields[0], map_column.doc, map_column.is_optional, MapTypeClass) - assert isinstance( - schema_fields[0].type.type, MapType - ), f"Field type {schema_fields[0].type.type} was expected to be {MapType}" - mapType: MapType = schema_fields[0].type.type - assert ( - mapType.keyType == "string" - ), f"Map key type {mapType.keyType} should always be a string" - assert ( - mapType.valueType == expected_map_value_type - ), f"Map value type {mapType.valueType} was expected to be {expected_map_value_type}" + # Converting an Iceberg Map type will be done by creating an array of struct(key, value) records. + # The first field will be the array. + assert len(schema_fields) == 3, f"Expected 3 fields, but got {len(schema_fields)}" + assert_field( + schema_fields[0], map_column.doc, map_column.is_optional, ArrayTypeClass + ) + + # The second field will be the key type + assert_field(schema_fields[1], None, False, expected_map_type) + + # The third field will be the value type + assert_field(schema_fields[2], None, True, expected_map_type) @pytest.mark.parametrize( diff --git a/metadata-models/src/main/pegasus/com/linkedin/identity/CorpUserCredentials.pdl b/metadata-models/src/main/pegasus/com/linkedin/identity/CorpUserCredentials.pdl new file mode 100644 index 00000000000000..9b7c6ac2fbdce1 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/identity/CorpUserCredentials.pdl @@ -0,0 +1,32 @@ +namespace com.linkedin.identity + +import com.linkedin.common.CorpuserUrn + +/** + * Corp user credentials + */ +@Aspect = { + "name": "corpUserCredentials" +} +@Aspect.EntityUrns = [ "com.linkedin.common.CorpuserUrn" ] +record CorpUserCredentials { + /** + * Salt used to hash password + */ + salt: string + + /** + * Hashed password generated by concatenating salt and password, then hashing + */ + hashedPassword: string + + /** + * Optional token needed to reset a user's password. Can only be set by the admin. + */ + passwordResetToken: optional string + + /** + * When the password reset token expires. + */ + passwordResetTokenExpirationTimeMillis: optional long +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/identity/InviteToken.pdl b/metadata-models/src/main/pegasus/com/linkedin/identity/InviteToken.pdl new file mode 100644 index 00000000000000..16559bfc2ccaaa --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/identity/InviteToken.pdl @@ -0,0 +1,16 @@ +namespace com.linkedin.identity + +import com.linkedin.common.Urn + +/** + * Aspect used to store the token needed to invite native DataHub users + */ +@Aspect = { + "name": "inviteToken" +} +record InviteToken { + /** + * The encrypted invite token. 
+ */ + token: string +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/metadata/key/InviteTokenKey.pdl b/metadata-models/src/main/pegasus/com/linkedin/metadata/key/InviteTokenKey.pdl new file mode 100644 index 00000000000000..d486a391907d47 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/metadata/key/InviteTokenKey.pdl @@ -0,0 +1,14 @@ +namespace com.linkedin.metadata.key + +/** + * Key for an InviteToken. + */ +@Aspect = { + "name": "inviteTokenKey" +} +record InviteTokenKey { + /** + * A unique id for the invite token. + */ + id: string +} diff --git a/metadata-models/src/main/resources/entity-registry.yml b/metadata-models/src/main/resources/entity-registry.yml index c024ddfc37e5fb..cb5af5e8358950 100644 --- a/metadata-models/src/main/resources/entity-registry.yml +++ b/metadata-models/src/main/resources/entity-registry.yml @@ -88,6 +88,7 @@ entities: - groupMembership - globalTags - status + - corpUserCredentials - name: corpGroup doc: CorpGroup represents an identity of a group of users in the enterprise. keyAspect: corpGroupKey @@ -232,4 +233,9 @@ entities: aspects: - dataHubUpgradeRequest - dataHubUpgradeResult + - name: inviteToken + category: core + keyAspect: inviteTokenKey + aspects: + - inviteToken events: diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java new file mode 100644 index 00000000000000..007a22ba1da7fe --- /dev/null +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java @@ -0,0 +1,295 @@ +package com.datahub.authentication.user; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.identity.CorpUserCredentials; +import com.linkedin.identity.CorpUserInfo; +import com.linkedin.identity.CorpUserStatus; +import com.linkedin.identity.InviteToken; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.secret.SecretService; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.security.MessageDigest; +import java.security.SecureRandom; +import java.time.Instant; +import java.util.Base64; +import java.util.Objects; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +import static com.linkedin.metadata.Constants.*; + + +/** + * Service responsible for creating, updating and authenticating native DataHub users. 
+ */ +@Slf4j +public class NativeUserService { + private static final int LOWERCASE_ASCII_START = 97; + private static final int LOWERCASE_ASCII_END = 122; + private static final int INVITE_TOKEN_LENGTH = 32; + private static final int SALT_TOKEN_LENGTH = 16; + private static final int PASSWORD_RESET_TOKEN_LENGTH = 32; + private static final String HASHING_ALGORITHM = "SHA-256"; + private static final long ONE_DAY_MILLIS = TimeUnit.DAYS.toMillis(1); + + private final EntityService _entityService; + private final EntityClient _entityClient; + private final SecretService _secretService; + private final SecureRandom _secureRandom; + private final MessageDigest _messageDigest; + + public NativeUserService(@Nonnull EntityService entityService, @Nonnull EntityClient entityClient, @Nonnull SecretService secretService) + throws Exception { + Objects.requireNonNull(entityService, "entityService must not be null!"); + Objects.requireNonNull(entityClient, "entityClient must not be null!"); + Objects.requireNonNull(secretService, "secretService must not be null!"); + + _entityService = entityService; + _entityClient = entityClient; + _secretService = secretService; + _secureRandom = new SecureRandom(); + _messageDigest = MessageDigest.getInstance(HASHING_ALGORITHM); + } + + public void createNativeUser(@Nonnull String userUrnString, @Nonnull String fullName, @Nonnull String email, + @Nonnull String title, @Nonnull String password, @Nonnull String inviteToken, Authentication authentication) + throws Exception { + Objects.requireNonNull(userUrnString, "userUrnString must not be null!"); + Objects.requireNonNull(fullName, "fullName must not be null!"); + Objects.requireNonNull(email, "email must not be null!"); + Objects.requireNonNull(title, "title must not be null!"); + Objects.requireNonNull(password, "password must not be null!"); + Objects.requireNonNull(inviteToken, "inviteToken must not be null!"); + + InviteToken inviteTokenAspect = + (InviteToken) _entityService.getLatestAspect(Urn.createFromString(GLOBAL_INVITE_TOKEN), + INVITE_TOKEN_ASPECT_NAME); + if (inviteTokenAspect == null || !inviteTokenAspect.hasToken() || !_secretService.decrypt( + inviteTokenAspect.getToken()).equals(inviteToken)) { + throw new RuntimeException("Invalid sign-up token. Please ask your administrator to send you an updated link!"); + } + + Urn userUrn = Urn.createFromString(userUrnString); + if (_entityService.exists(userUrn)) { + throw new RuntimeException("This user already exists!
Cannot create a new user."); + } + updateCorpUserInfo(userUrn, fullName, email, title, authentication); + updateCorpUserStatus(userUrn, authentication); + updateCorpUserCredentials(userUrn, password, authentication); + } + + void updateCorpUserInfo(@Nonnull Urn userUrn, @Nonnull String fullName, @Nonnull String email, @Nonnull String title, + Authentication authentication) throws Exception { + // Construct corpUserInfo + final CorpUserInfo corpUserInfo = new CorpUserInfo(); + corpUserInfo.setFullName(fullName); + corpUserInfo.setDisplayName(fullName); + corpUserInfo.setEmail(email); + corpUserInfo.setTitle(title); + corpUserInfo.setActive(true); + + // Ingest corpUserInfo MCP + final MetadataChangeProposal corpUserInfoProposal = new MetadataChangeProposal(); + corpUserInfoProposal.setEntityType(CORP_USER_ENTITY_NAME); + corpUserInfoProposal.setEntityUrn(userUrn); + corpUserInfoProposal.setAspectName(CORP_USER_INFO_ASPECT_NAME); + corpUserInfoProposal.setAspect(GenericRecordUtils.serializeAspect(corpUserInfo)); + corpUserInfoProposal.setChangeType(ChangeType.UPSERT); + _entityClient.ingestProposal(corpUserInfoProposal, authentication); + } + + void updateCorpUserStatus(@Nonnull Urn userUrn, Authentication authentication) throws Exception { + // Construct corpUserStatus + CorpUserStatus corpUserStatus = new CorpUserStatus(); + corpUserStatus.setStatus(CORP_USER_STATUS_ACTIVE); + corpUserStatus.setLastModified( + new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis())); + + // Ingest corpUserStatus MCP + final MetadataChangeProposal corpUserStatusProposal = new MetadataChangeProposal(); + corpUserStatusProposal.setEntityType(CORP_USER_ENTITY_NAME); + corpUserStatusProposal.setEntityUrn(userUrn); + corpUserStatusProposal.setAspectName(CORP_USER_STATUS_ASPECT_NAME); + corpUserStatusProposal.setAspect(GenericRecordUtils.serializeAspect(corpUserStatus)); + corpUserStatusProposal.setChangeType(ChangeType.UPSERT); + _entityClient.ingestProposal(corpUserStatusProposal, authentication); + } + + void updateCorpUserCredentials(@Nonnull Urn userUrn, @Nonnull String password, + Authentication authentication) throws Exception { + // Construct corpUserCredentials + CorpUserCredentials corpUserCredentials = new CorpUserCredentials(); + final byte[] salt = getRandomBytes(SALT_TOKEN_LENGTH); + String encryptedSalt = _secretService.encrypt(Base64.getEncoder().encodeToString(salt)); + corpUserCredentials.setSalt(encryptedSalt); + String hashedPassword = getHashedPassword(salt, password); + corpUserCredentials.setHashedPassword(hashedPassword); + + // Ingest corpUserCredentials MCP + final MetadataChangeProposal corpUserCredentialsProposal = new MetadataChangeProposal(); + corpUserCredentialsProposal.setEntityType(CORP_USER_ENTITY_NAME); + corpUserCredentialsProposal.setEntityUrn(userUrn); + corpUserCredentialsProposal.setAspectName(CORP_USER_CREDENTIALS_ASPECT_NAME); + corpUserCredentialsProposal.setAspect(GenericRecordUtils.serializeAspect(corpUserCredentials)); + corpUserCredentialsProposal.setChangeType(ChangeType.UPSERT); + _entityClient.ingestProposal(corpUserCredentialsProposal, authentication); + } + + public String generateNativeUserInviteToken(Authentication authentication) throws Exception { + // Construct inviteToken + InviteToken inviteToken = new InviteToken(); + String token = generateRandomLowercaseToken(INVITE_TOKEN_LENGTH); + inviteToken.setToken(_secretService.encrypt(token)); + + // Ingest inviteToken MCP + final MetadataChangeProposal
inviteTokenProposal = new MetadataChangeProposal(); + inviteTokenProposal.setEntityType(INVITE_TOKEN_ENTITY_NAME); + inviteTokenProposal.setEntityUrn(Urn.createFromString(GLOBAL_INVITE_TOKEN)); + inviteTokenProposal.setAspectName(INVITE_TOKEN_ASPECT_NAME); + inviteTokenProposal.setAspect(GenericRecordUtils.serializeAspect(inviteToken)); + inviteTokenProposal.setChangeType(ChangeType.UPSERT); + _entityClient.ingestProposal(inviteTokenProposal, authentication); + + return token; + } + + public String getNativeUserInviteToken(Authentication authentication) throws Exception { + InviteToken inviteToken = (InviteToken) _entityService.getLatestAspect(Urn.createFromString(GLOBAL_INVITE_TOKEN), + INVITE_TOKEN_ASPECT_NAME); + if (inviteToken == null || !inviteToken.hasToken()) { + return generateNativeUserInviteToken(authentication); + } + return _secretService.decrypt(inviteToken.getToken()); + } + + public String generateNativeUserPasswordResetToken(@Nonnull String userUrnString, + Authentication authentication) throws Exception { + Objects.requireNonNull(userUrnString, "userUrnString must not be null!"); + + Urn userUrn = Urn.createFromString(userUrnString); + + CorpUserCredentials corpUserCredentials = + (CorpUserCredentials) _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); + if (corpUserCredentials == null || !corpUserCredentials.hasSalt() || !corpUserCredentials.hasHashedPassword()) { + throw new RuntimeException("User does not exist or is a non-native user!"); + } + // Add reset token to CorpUserCredentials + String passwordResetToken = generateRandomLowercaseToken(PASSWORD_RESET_TOKEN_LENGTH); + corpUserCredentials.setPasswordResetToken(_secretService.encrypt(passwordResetToken)); + + long expirationTime = Instant.now().plusMillis(ONE_DAY_MILLIS).toEpochMilli(); + corpUserCredentials.setPasswordResetTokenExpirationTimeMillis(expirationTime); + + // Ingest CorpUserCredentials MCP + final MetadataChangeProposal corpUserCredentialsProposal = new MetadataChangeProposal(); + corpUserCredentialsProposal.setEntityType(CORP_USER_ENTITY_NAME); + corpUserCredentialsProposal.setEntityUrn(userUrn); + corpUserCredentialsProposal.setAspectName(CORP_USER_CREDENTIALS_ASPECT_NAME); + corpUserCredentialsProposal.setAspect(GenericRecordUtils.serializeAspect(corpUserCredentials)); + corpUserCredentialsProposal.setChangeType(ChangeType.UPSERT); + _entityClient.ingestProposal(corpUserCredentialsProposal, authentication); + + return passwordResetToken; + } + + public void resetCorpUserCredentials(@Nonnull String userUrnString, @Nonnull String password, + @Nonnull String resetToken, Authentication authentication) throws Exception { + Objects.requireNonNull(userUrnString, "userUrnString must not be null!"); + Objects.requireNonNull(password, "password must not be null!"); + Objects.requireNonNull(resetToken, "resetToken must not be null!"); + + Urn userUrn = Urn.createFromString(userUrnString); + + CorpUserCredentials corpUserCredentials = + (CorpUserCredentials) _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); + + if (corpUserCredentials == null || !corpUserCredentials.hasSalt() || !corpUserCredentials.hasHashedPassword()) { + throw new RuntimeException("User does not exist!"); + } + + if (!corpUserCredentials.hasPasswordResetToken() + || !corpUserCredentials.hasPasswordResetTokenExpirationTimeMillis() + || corpUserCredentials.getPasswordResetTokenExpirationTimeMillis() == null) { + throw new RuntimeException("User has not generated a password reset token!"); + 
} + + if (!_secretService.decrypt( + corpUserCredentials.getPasswordResetToken()).equals(resetToken)) { + throw new RuntimeException("Invalid reset token. Please ask your administrator to send you an updated link!"); + } + + long currentTimeMillis = Instant.now().toEpochMilli(); + if (currentTimeMillis > corpUserCredentials.getPasswordResetTokenExpirationTimeMillis()) { + throw new RuntimeException("Reset token has expired! Please ask your administrator to create a new one"); + } + + // Construct corpUserCredentials + final byte[] salt = getRandomBytes(SALT_TOKEN_LENGTH); + String encryptedSalt = _secretService.encrypt(Base64.getEncoder().encodeToString(salt)); + corpUserCredentials.setSalt(encryptedSalt); + String hashedPassword = getHashedPassword(salt, password); + corpUserCredentials.setHashedPassword(hashedPassword); + + // Ingest corpUserCredentials MCP + final MetadataChangeProposal corpUserCredentialsProposal = new MetadataChangeProposal(); + corpUserCredentialsProposal.setEntityType(CORP_USER_ENTITY_NAME); + corpUserCredentialsProposal.setEntityUrn(userUrn); + corpUserCredentialsProposal.setAspectName(CORP_USER_CREDENTIALS_ASPECT_NAME); + corpUserCredentialsProposal.setAspect(GenericRecordUtils.serializeAspect(corpUserCredentials)); + corpUserCredentialsProposal.setChangeType(ChangeType.UPSERT); + _entityClient.ingestProposal(corpUserCredentialsProposal, authentication); + } + + byte[] getRandomBytes(int length) { + byte[] randomBytes = new byte[length]; + _secureRandom.nextBytes(randomBytes); + return randomBytes; + } + + String generateRandomLowercaseToken(int length) { + return _secureRandom.ints(length, LOWERCASE_ASCII_START, LOWERCASE_ASCII_END + 1) + .mapToObj(i -> String.valueOf((char) i)) + .collect(Collectors.joining()); + } + + byte[] saltPassword(@Nonnull byte[] salt, @Nonnull String password) throws IOException { + byte[] passwordBytes = password.getBytes(); + ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); + byteArrayOutputStream.write(salt); + byteArrayOutputStream.write(passwordBytes); + return byteArrayOutputStream.toByteArray(); + } + + public String getHashedPassword(@Nonnull byte[] salt, @Nonnull String password) throws IOException { + byte[] saltedPassword = saltPassword(salt, password); + byte[] hashedPassword = _messageDigest.digest(saltedPassword); + return Base64.getEncoder().encodeToString(hashedPassword); + } + + public boolean doesPasswordMatch(@Nonnull String userUrnString, @Nonnull String password) throws Exception { + Objects.requireNonNull(userUrnString, "userUrnString must not be null!"); + Objects.requireNonNull(password, "Password must not be null!"); + + Urn userUrn = Urn.createFromString(userUrnString); + CorpUserCredentials corpUserCredentials = + (CorpUserCredentials) _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); + if (corpUserCredentials == null || !corpUserCredentials.hasSalt() || !corpUserCredentials.hasHashedPassword()) { + return false; + } + + String decryptedSalt = _secretService.decrypt(corpUserCredentials.getSalt()); + byte[] salt = Base64.getDecoder().decode(decryptedSalt); + String storedHashedPassword = corpUserCredentials.getHashedPassword(); + String hashedPassword = getHashedPassword(salt, password); + return storedHashedPassword.equals(hashedPassword); + } +} diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java
new file mode 100644 index 00000000000000..d19de607097c0a --- /dev/null +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java @@ -0,0 +1,289 @@ +package com.datahub.authentication.user; + +import com.datahub.authentication.Actor; +import com.datahub.authentication.ActorType; +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.CorpuserUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.identity.CorpUserCredentials; +import com.linkedin.identity.InviteToken; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.secret.SecretService; +import java.time.Instant; +import java.util.concurrent.TimeUnit; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + + +public class NativeUserServiceTest { + private static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system"; + + private static final String USER_URN_STRING = "urn:li:corpuser:test"; + private static final String FULL_NAME = "MOCK NAME"; + private static final String EMAIL = "mock@email.com"; + private static final String TITLE = "Data Scientist"; + private static final String PASSWORD = "password"; + private static final String INVITE_TOKEN = "inviteToken"; + private static final String ENCRYPTED_INVITE_TOKEN = "encryptedInviteToken"; + private static final String RESET_TOKEN = "inviteToken"; + private static final String ENCRYPTED_RESET_TOKEN = "encryptedInviteToken"; + private static final String ENCRYPTED_SALT = "encryptedSalt"; + private static final Urn USER_URN = new CorpuserUrn(EMAIL); + private static final long ONE_DAY_MILLIS = TimeUnit.DAYS.toMillis(1); + private static final Authentication SYSTEM_AUTHENTICATION = + new Authentication(new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID), ""); + + private EntityService _entityService; + private EntityClient _entityClient; + private SecretService _secretService; + private NativeUserService _nativeUserService; + + @BeforeMethod + public void setupTest() throws Exception { + _entityService = mock(EntityService.class); + _entityClient = mock(EntityClient.class); + _secretService = mock(SecretService.class); + + _nativeUserService = new NativeUserService(_entityService, _entityClient, _secretService); + } + + @Test + public void testConstructor() throws Exception { + assertThrows(() -> new NativeUserService(null, _entityClient, _secretService)); + assertThrows(() -> new NativeUserService(_entityService, null, _secretService)); + assertThrows(() -> new NativeUserService(_entityService, _entityClient, null)); + + // Succeeds! 
+ new NativeUserService(_entityService, _entityClient, _secretService); + } + + @Test + public void testCreateNativeUserNullArguments() { + assertThrows(() -> _nativeUserService.createNativeUser(null, FULL_NAME, EMAIL, TITLE, PASSWORD, INVITE_TOKEN, + SYSTEM_AUTHENTICATION)); + assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, null, EMAIL, TITLE, PASSWORD, INVITE_TOKEN, + SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, null, TITLE, PASSWORD, INVITE_TOKEN, + SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, null, PASSWORD, INVITE_TOKEN, + SYSTEM_AUTHENTICATION)); + assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, null, INVITE_TOKEN, + SYSTEM_AUTHENTICATION)); + assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, null, + SYSTEM_AUTHENTICATION)); + } + + @Test(expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "Invalid sign-up token. Please ask your administrator to send you an updated link!") + public void testCreateNativeUserInviteTokenDoesNotExist() throws Exception { + // Nonexistent invite token + when(_entityService.getLatestAspect(any(), eq(INVITE_TOKEN_ASPECT_NAME))).thenReturn(null); + + _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, INVITE_TOKEN, + SYSTEM_AUTHENTICATION); + } + + @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") + public void testCreateNativeUserUserAlreadyExists() throws Exception { + InviteToken mockInviteTokenAspect = mock(InviteToken.class); + when(_entityService.getLatestAspect(any(), eq(INVITE_TOKEN_ASPECT_NAME))).thenReturn(mockInviteTokenAspect); + when(mockInviteTokenAspect.hasToken()).thenReturn(true); + when(mockInviteTokenAspect.getToken()).thenReturn(ENCRYPTED_INVITE_TOKEN); + when(_secretService.decrypt(eq(ENCRYPTED_INVITE_TOKEN))).thenReturn(INVITE_TOKEN); + + // The user already exists + when(_entityService.exists(any())).thenReturn(true); + + _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, INVITE_TOKEN, + SYSTEM_AUTHENTICATION); + } + + @Test + public void testCreateNativeUserPasses() throws Exception { + InviteToken mockInviteTokenAspect = mock(InviteToken.class); + when(_entityService.getLatestAspect(any(), eq(INVITE_TOKEN_ASPECT_NAME))).thenReturn(mockInviteTokenAspect); + when(mockInviteTokenAspect.hasToken()).thenReturn(true); + when(mockInviteTokenAspect.getToken()).thenReturn(ENCRYPTED_INVITE_TOKEN); + when(_entityService.exists(any())).thenReturn(false); + when(_secretService.decrypt(eq(ENCRYPTED_INVITE_TOKEN))).thenReturn(INVITE_TOKEN); + when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_SALT); + + _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, INVITE_TOKEN, + SYSTEM_AUTHENTICATION); + } + + @Test + public void testUpdateCorpUserInfoPasses() throws Exception { + _nativeUserService.updateCorpUserInfo(USER_URN, FULL_NAME, EMAIL, TITLE, SYSTEM_AUTHENTICATION); + verify(_entityClient).ingestProposal(any(), any()); + } + + @Test + public void testUpdateCorpUserStatusPasses() throws Exception { + _nativeUserService.updateCorpUserStatus(USER_URN, SYSTEM_AUTHENTICATION); + verify(_entityClient).ingestProposal(any(), any()); + } + + @Test + public 
void testUpdateCorpUserCredentialsPasses() throws Exception { + when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_SALT); + + _nativeUserService.updateCorpUserCredentials(USER_URN, PASSWORD, SYSTEM_AUTHENTICATION); + verify(_entityClient).ingestProposal(any(), any()); + } + + @Test + public void testGenerateNativeUserInviteTokenPasses() throws Exception { + when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_INVITE_TOKEN); + + _nativeUserService.generateNativeUserInviteToken(SYSTEM_AUTHENTICATION); + verify(_entityClient).ingestProposal(any(), any()); + } + + @Test + public void testGetNativeUserInviteTokenInviteTokenDoesNotExistPasses() throws Exception { + // Nonexistent invite token + when(_entityService.getLatestAspect(any(), eq(INVITE_TOKEN_ASPECT_NAME))).thenReturn(null); + when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_INVITE_TOKEN); + + _nativeUserService.getNativeUserInviteToken(SYSTEM_AUTHENTICATION); + verify(_entityClient).ingestProposal(any(), any()); + } + + @Test + public void testGetNativeUserInviteTokenPasses() throws Exception { + InviteToken mockInviteTokenAspect = mock(InviteToken.class); + when(_entityService.getLatestAspect(any(), eq(INVITE_TOKEN_ASPECT_NAME))).thenReturn(mockInviteTokenAspect); + when(_entityService.exists(any())).thenReturn(false); + when(mockInviteTokenAspect.hasToken()).thenReturn(true); + when(mockInviteTokenAspect.getToken()).thenReturn(ENCRYPTED_INVITE_TOKEN); + when(_secretService.decrypt(eq(ENCRYPTED_INVITE_TOKEN))).thenReturn(INVITE_TOKEN); + + assertEquals(_nativeUserService.getNativeUserInviteToken(SYSTEM_AUTHENTICATION), INVITE_TOKEN); + } + + @Test + public void testGenerateNativeUserResetTokenNullArguments() { + assertThrows(() -> _nativeUserService.generateNativeUserPasswordResetToken(null, SYSTEM_AUTHENTICATION)); + } + + @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "User does not exist or is a non-native user!") + public void testGenerateNativeUserResetTokenNotNativeUser() throws Exception { + // Nonexistent corpUserCredentials + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn(null); + + _nativeUserService.generateNativeUserPasswordResetToken(USER_URN_STRING, SYSTEM_AUTHENTICATION); + } + + @Test + public void testGenerateNativeUserResetToken() throws Exception { + CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( + mockCorpUserCredentialsAspect); + when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); + when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); + + when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_INVITE_TOKEN); + + _nativeUserService.generateNativeUserPasswordResetToken(USER_URN_STRING, SYSTEM_AUTHENTICATION); + verify(_entityClient).ingestProposal(any(), any()); + } + + @Test + public void testResetCorpUserCredentialsNullArguments() { + assertThrows(() -> _nativeUserService.resetCorpUserCredentials(null, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, null, RESET_TOKEN, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, null, SYSTEM_AUTHENTICATION)); + } + + @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "User has not generated a password reset token!") + 
public void testResetCorpUserCredentialsNoPasswordResetToken() throws Exception { + CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( + mockCorpUserCredentialsAspect); + when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); + when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); + // No password reset token + when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(false); + + _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + } + + @Test(expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "Invalid reset token. Please ask your administrator to send you an updated link!") + public void testResetCorpUserCredentialsBadResetToken() throws Exception { + CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( + mockCorpUserCredentialsAspect); + when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); + when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); + when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(true); + when(mockCorpUserCredentialsAspect.getPasswordResetToken()).thenReturn(ENCRYPTED_RESET_TOKEN); + when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()).thenReturn(true); + when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()).thenReturn( + Instant.now().toEpochMilli()); + // Reset token won't match + when(_secretService.decrypt(eq(ENCRYPTED_RESET_TOKEN))).thenReturn("badResetToken"); + + _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + } + + @Test(expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "Reset token has expired! 
Please ask your administrator to create a new one") + public void testResetCorpUserCredentialsExpiredResetToken() throws Exception { + CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( + mockCorpUserCredentialsAspect); + when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); + when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); + when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(true); + when(mockCorpUserCredentialsAspect.getPasswordResetToken()).thenReturn(ENCRYPTED_RESET_TOKEN); + when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()).thenReturn(true); + // Reset token expiration time will be before the system time when we run resetCorpUserCredentials + when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()).thenReturn(0L); + when(_secretService.decrypt(eq(ENCRYPTED_RESET_TOKEN))).thenReturn(RESET_TOKEN); + + _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + } + + @Test + public void testResetCorpUserCredentialsPasses() throws Exception { + CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( + mockCorpUserCredentialsAspect); + when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); + when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); + when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(true); + when(mockCorpUserCredentialsAspect.getPasswordResetToken()).thenReturn(ENCRYPTED_RESET_TOKEN); + when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()).thenReturn(true); + when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()).thenReturn( + Instant.now().plusMillis(ONE_DAY_MILLIS).toEpochMilli()); + when(_secretService.decrypt(eq(ENCRYPTED_RESET_TOKEN))).thenReturn(RESET_TOKEN); + when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_SALT); + + _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + verify(_entityClient).ingestProposal(any(), any()); + } + + @Test + public void testDoesPasswordMatchNullArguments() { + assertThrows(() -> _nativeUserService.doesPasswordMatch(null, PASSWORD)); + assertThrows(() -> _nativeUserService.doesPasswordMatch(USER_URN_STRING, null)); + } + + @Test + public void testDoesPasswordMatchNoCorpUserCredentials() throws Exception { + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn(null); + + assertFalse(_nativeUserService.doesPasswordMatch(USER_URN_STRING, PASSWORD)); + } +} diff --git a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/authentication/AuthServiceController.java b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/authentication/AuthServiceController.java index 4b6f0a31abb9b7..60de2de9a8689d 100644 --- a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/authentication/AuthServiceController.java +++ b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/authentication/AuthServiceController.java @@ -1,7 +1,8 @@ package com.datahub.authentication; -import com.datahub.authentication.token.TokenType; import com.datahub.authentication.token.StatelessTokenService; +import 
com.datahub.authentication.token.TokenType; +import com.datahub.authentication.user.NativeUserService; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -25,6 +26,16 @@ public class AuthServiceController { private static final String USER_ID_FIELD_NAME = "userId"; private static final String ACCESS_TOKEN_FIELD_NAME = "accessToken"; + private static final String USER_URN_FIELD_NAME = "userUrn"; + private static final String FULL_NAME_FIELD_NAME = "fullName"; + private static final String EMAIL_FIELD_NAME = "email"; + private static final String TITLE_FIELD_NAME = "title"; + private static final String PASSWORD_FIELD_NAME = "password"; + private static final String INVITE_TOKEN_FIELD_NAME = "inviteToken"; + private static final String RESET_TOKEN_FIELD_NAME = "resetToken"; + private static final String IS_NATIVE_USER_CREATED_FIELD_NAME = "isNativeUserCreated"; + private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD_NAME = "areNativeUserCredentialsReset"; + private static final String DOES_PASSWORD_MATCH_FIELD_NAME = "doesPasswordMatch"; @Inject StatelessTokenService _statelessTokenService; @@ -35,13 +46,16 @@ public class AuthServiceController { @Inject ConfigurationProvider _configProvider; + @Inject + NativeUserService _nativeUserService; + /** * Generates a JWT access token for a user UI session, provided a unique "user id" to generate the token for inside a JSON * POST body. * * Example Request: * - * POST /generateSessionToken -H "Authorization: Basic :" + * POST /generateSessionTokenForUser -H "Authorization: Basic :" * { * "userId": "datahub" * } @@ -95,7 +109,192 @@ CompletableFuture> generateSessionTokenForUser(final Http }); } - // Currently only internal system is authorized to generate a token on behalf of a user! + /** + * Creates a native DataHub user using the provided full name, email and password. The provided invite token must + * be current, otherwise a new user will not be created.
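+ * The invite token is validated against the encrypted token stored on the global inviteToken entity; if it does not match, no user aspects are written.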
+ * + * Example Request: + * + * POST /signUp -H "Authorization: Basic :" + * { + * "fullName": "Full Name" + * "userUrn": "urn:li:corpuser:test" + * "email": "email@test.com" + * "title": "Data Scientist" + * "password": "password123" + * "inviteToken": "abcd" + * } + * + * Example Response: + * + * { + * "isNativeUserCreated": true + * } + */ + @PostMapping(value = "/signUp", produces = "application/json;charset=utf-8") + CompletableFuture> signUp(final HttpEntity httpEntity) { + String jsonStr = httpEntity.getBody(); + ObjectMapper mapper = new ObjectMapper(); + JsonNode bodyJson; + try { + bodyJson = mapper.readTree(jsonStr); + } catch (JsonProcessingException e) { + log.error(String.format("Failed to parse json while attempting to create native user %s", jsonStr)); + return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); + } + if (bodyJson == null) { + return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); + } + /* + * Extract the sign-up fields from the request body + */ + JsonNode userUrn = bodyJson.get(USER_URN_FIELD_NAME); + JsonNode fullName = bodyJson.get(FULL_NAME_FIELD_NAME); + JsonNode email = bodyJson.get(EMAIL_FIELD_NAME); + JsonNode title = bodyJson.get(TITLE_FIELD_NAME); + JsonNode password = bodyJson.get(PASSWORD_FIELD_NAME); + JsonNode inviteToken = bodyJson.get(INVITE_TOKEN_FIELD_NAME); + if (fullName == null || userUrn == null || email == null || title == null || password == null + || inviteToken == null) { + return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); + } + + String userUrnString = userUrn.asText(); + String fullNameString = fullName.asText(); + String emailString = email.asText(); + String titleString = title.asText(); + String passwordString = password.asText(); + String inviteTokenString = inviteToken.asText(); + log.debug(String.format("Attempting to create credentials for native user %s", userUrnString)); + return CompletableFuture.supplyAsync(() -> { + try { + _nativeUserService.createNativeUser(userUrnString, fullNameString, emailString, titleString, passwordString, + inviteTokenString, AuthenticationContext.getAuthentication()); + String response = buildSignUpResponse(); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + log.error(String.format("Failed to create credentials for native user %s", userUrnString), e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); + } + + /** + * Resets the credentials for a native DataHub user using the provided user urn and new password.
The provided reset + * token must be current, otherwise the credentials will not be updated. + * + * Example Request: + * + * POST /resetNativeUserCredentials -H "Authorization: Basic :" + * { + * "userUrn": "urn:li:corpuser:test" + * "password": "password123" + * "resetToken": "abcd" + * } + * + * Example Response: + * + * { + * "areNativeUserCredentialsReset": true + * } + */ + @PostMapping(value = "/resetNativeUserCredentials", produces = "application/json;charset=utf-8") + CompletableFuture> resetNativeUserCredentials(final HttpEntity httpEntity) { + String jsonStr = httpEntity.getBody(); + ObjectMapper mapper = new ObjectMapper(); + JsonNode bodyJson; + try { + bodyJson = mapper.readTree(jsonStr); + } catch (JsonProcessingException e) { + log.error(String.format("Failed to parse json while attempting to reset credentials for native user %s", jsonStr)); + return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); + } + if (bodyJson == null) { + return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); + } + /* + * Extract the reset fields from the request body + */ + JsonNode userUrn = bodyJson.get(USER_URN_FIELD_NAME); + JsonNode password = bodyJson.get(PASSWORD_FIELD_NAME); + JsonNode resetToken = bodyJson.get(RESET_TOKEN_FIELD_NAME); + if (userUrn == null || password == null || resetToken == null) { + return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); + } + + String userUrnString = userUrn.asText(); + String passwordString = password.asText(); + String resetTokenString = resetToken.asText(); + log.debug(String.format("Attempting to reset credentials for native user %s", userUrnString)); + return CompletableFuture.supplyAsync(() -> { + try { + _nativeUserService.resetCorpUserCredentials(userUrnString, passwordString, resetTokenString, + AuthenticationContext.getAuthentication()); + String response = buildResetNativeUserCredentialsResponse(); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + log.error(String.format("Failed to reset credentials for native user %s", userUrnString), e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); + } + + /** + * Verifies the credentials for a native DataHub user.
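+ * The stored salt is decrypted, the supplied password is hashed with it, and the result is compared against the stored hash.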
+ * + * Example Request: + * + * POST /verifyNativeUserCredentials -H "Authorization: Basic :" + * { + * "userUrn": "urn:li:corpuser:test" + * "password": "password123" + * } + * + * Example Response: + * + * { + * "doesPasswordMatch": true + * } + */ + @PostMapping(value = "/verifyNativeUserCredentials", produces = "application/json;charset=utf-8") + CompletableFuture> verifyNativeUserCredentials(final HttpEntity httpEntity) { + String jsonStr = httpEntity.getBody(); + ObjectMapper mapper = new ObjectMapper(); + JsonNode bodyJson; + try { + bodyJson = mapper.readTree(jsonStr); + } catch (JsonProcessingException e) { + log.error(String.format("Failed to parse json while attempting to verify native user password %s", jsonStr)); + return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); + } + if (bodyJson == null) { + return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); + } + /* + * Extract the userUrn and password fields from the request body + */ + JsonNode userUrn = bodyJson.get(USER_URN_FIELD_NAME); + JsonNode password = bodyJson.get(PASSWORD_FIELD_NAME); + if (userUrn == null || password == null) { + return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); + } + + String userUrnString = userUrn.asText(); + String passwordString = password.asText(); + log.debug(String.format("Attempting to verify credentials for native user %s", userUrnString)); + return CompletableFuture.supplyAsync(() -> { + try { + boolean doesPasswordMatch = _nativeUserService.doesPasswordMatch(userUrnString, passwordString); + String response = buildVerifyNativeUserPasswordResponse(doesPasswordMatch); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + log.error(String.format("Failed to verify credentials for native user %s", userUrnString), e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); + } + + // Currently, only internal system is authorized to generate a token on behalf of a user! private boolean isAuthorizedToGenerateSessionToken(final String actorId) { // Verify that the actor is an internal system caller.
final String systemClientId = _systemAuthentication.getActor().getId(); @@ -107,4 +306,22 @@ private String buildTokenResponse(final String token) { json.put(ACCESS_TOKEN_FIELD_NAME, token); return json.toString(); } + + private String buildSignUpResponse() { + JSONObject json = new JSONObject(); + json.put(IS_NATIVE_USER_CREATED_FIELD_NAME, true); + return json.toString(); + } + + private String buildResetNativeUserCredentialsResponse() { + JSONObject json = new JSONObject(); + json.put(ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD_NAME, true); + return json.toString(); + } + + private String buildVerifyNativeUserPasswordResponse(final boolean doesPasswordMatch) { + JSONObject json = new JSONObject(); + json.put(DOES_PASSWORD_MATCH_FIELD_NAME, doesPasswordMatch); + return json.toString(); + } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java new file mode 100644 index 00000000000000..9f510444a51aed --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java @@ -0,0 +1,40 @@ + + +package com.linkedin.gms.factory.auth; + +import com.datahub.authentication.user.NativeUserService; +import com.linkedin.entity.client.JavaEntityClient; +import com.linkedin.gms.factory.spring.YamlPropertySourceFactory; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.secret.SecretService; +import javax.annotation.Nonnull; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.PropertySource; +import org.springframework.context.annotation.Scope; + + +@Configuration +@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) +public class NativeUserServiceFactory { + @Autowired + @Qualifier("entityService") + private EntityService _entityService; + + @Autowired + @Qualifier("javaEntityClient") + private JavaEntityClient _javaEntityClient; + + @Autowired + @Qualifier("dataHubSecretService") + private SecretService _secretService; + + @Bean(name = "nativeUserService") + @Scope("singleton") + @Nonnull + protected NativeUserService getInstance() throws Exception { + return new NativeUserService(this._entityService, this._javaEntityClient, this._secretService); + } +} \ No newline at end of file diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java index 79ff05a7c3da06..ff30eb1371a7d1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java @@ -1,6 +1,7 @@ package com.linkedin.gms.factory.graphql; import com.datahub.authentication.token.StatefulTokenService; +import com.datahub.authentication.user.NativeUserService; import com.linkedin.datahub.graphql.GmsGraphQLEngine; import com.linkedin.datahub.graphql.GraphQLEngine; import com.linkedin.datahub.graphql.analytics.service.AnalyticsService; @@ -97,6 +98,10 @@ public class GraphQLEngineFactory { @Qualifier("timelineService") private 
diff --git a/metadata-service/factories/src/main/resources/application.yml b/metadata-service/factories/src/main/resources/application.yml
index 4d56276790b7ba..4074dcd2ff4e00 100644
--- a/metadata-service/factories/src/main/resources/application.yml
+++ b/metadata-service/factories/src/main/resources/application.yml
@@ -89,7 +89,7 @@ platformAnalytics:
 
 visualConfig:
   assets:
-    logoUrl: ${REACT_APP_LOGO_URL:#{null}}
+    logoUrl: ${REACT_APP_LOGO_URL:/assets/platforms/datahublogo.png}
 
 
 # Storage Layer
diff --git a/metadata-service/war/src/main/resources/boot/policies.json b/metadata-service/war/src/main/resources/boot/policies.json
index 9a1596b48f581a..ae154bd132e2b1 100644
--- a/metadata-service/war/src/main/resources/boot/policies.json
+++ b/metadata-service/war/src/main/resources/boot/policies.json
@@ -20,7 +20,8 @@
       "MANAGE_ACCESS_TOKENS",
       "MANAGE_DOMAINS",
       "MANAGE_TESTS",
-      "MANAGE_GLOSSARIES"
+      "MANAGE_GLOSSARIES",
+      "MANAGE_USER_CREDENTIALS"
     ],
     "displayName":"Root User - All Platform Privileges",
     "description":"Grants full platform privileges to root datahub super user.",
diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/Constants.java b/metadata-utils/src/main/java/com/linkedin/metadata/Constants.java
index fc02f39c0949ab..5a6d264aee93bc 100644
--- a/metadata-utils/src/main/java/com/linkedin/metadata/Constants.java
+++ b/metadata-utils/src/main/java/com/linkedin/metadata/Constants.java
@@ -15,6 +15,8 @@ public class Constants {
 
   public static final String DEFAULT_RUN_ID = "no-run-id-provided";
 
+  public static final String GLOBAL_INVITE_TOKEN = "urn:li:inviteToken:global";
+
   /**
    * Entities
    */
@@ -45,6 +47,8 @@ public class Constants {
   public static final String DATA_PLATFORM_INSTANCE_ENTITY_NAME = "dataPlatformInstance";
   public static final String ACCESS_TOKEN_ENTITY_NAME = "dataHubAccessToken";
   public static final String DATA_HUB_UPGRADE_ENTITY_NAME = "dataHubUpgrade";
+  public static final String INVITE_TOKEN_ENTITY_NAME = "inviteToken";
+
 
   /**
    * Aspects
@@ -68,6 +72,7 @@ public class Constants {
   public static final String CORP_USER_EDITABLE_INFO_ASPECT_NAME = "corpUserEditableInfo";
   public static final String CORP_USER_INFO_ASPECT_NAME = "corpUserInfo";
   public static final String CORP_USER_STATUS_ASPECT_NAME = "corpUserStatus";
+  public static final String CORP_USER_CREDENTIALS_ASPECT_NAME = "corpUserCredentials";
 
   // Group
   public static final String CORP_GROUP_KEY_ASPECT_NAME = "corpGroupKey";
@@ -217,6 +222,10 @@ public class Constants {
   public static final String DATA_HUB_UPGRADE_RESULT_ASPECT_NAME = "dataHubUpgradeResult";
 
+  // Invite Token
+  public static final String INVITE_TOKEN_ASPECT_NAME = "inviteToken";
+
+
   // acryl-main only
   public static final String CHANGE_EVENT_PLATFORM_EVENT_NAME = "entityChangeEvent";
 
   /**
diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java
index 70e4ff12f55f32..20b2074a160645 100644
--- a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java
+++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java
@@ -75,6 +75,10 @@ public class PoliciesConfig {
       "Manage Glossaries",
       "Create, edit, and remove Glossary Entities");
 
+  public static final Privilege MANAGE_USER_CREDENTIALS_PRIVILEGE =
+      Privilege.of("MANAGE_USER_CREDENTIALS", "Manage User Credentials",
+          "Manage credentials for native DataHub users, including inviting new users and resetting passwords");
+
   public static final List<Privilege> PLATFORM_PRIVILEGES = ImmutableList.of(
       MANAGE_POLICIES_PRIVILEGE,
       MANAGE_USERS_AND_GROUPS_PRIVILEGE,
@@ -85,7 +89,8 @@ public class PoliciesConfig {
       GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE,
       MANAGE_ACCESS_TOKENS,
       MANAGE_TESTS_PRIVILEGE,
-      MANAGE_GLOSSARIES_PRIVILEGE
+      MANAGE_GLOSSARIES_PRIVILEGE,
+      MANAGE_USER_CREDENTIALS_PRIVILEGE
   );
 
   // Resource Privileges //
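For context, the new MANAGE_USER_CREDENTIALS platform privilege surfaces in GraphQL as the manageUserCredentials field of platformPrivileges, which the smoke test below asserts for the root user. The following is a rough sketch of checking it from a script; the frontend address and the datahub/datahub root login are assumptions about a local quickstart deployment, not part of this change.

    # Sketch only: read the manageUserCredentials platform privilege for the logged-in user.
    import requests

    FRONTEND = "http://localhost:9002"  # assumed frontend address

    session = requests.Session()
    # Log in as the root user (assumed default credentials).
    session.post(
        f"{FRONTEND}/logIn",
        headers={"Content-Type": "application/json"},
        data='{"username":"datahub", "password":"datahub"}',
    )

    me_query = {
        "query": """query me {
          me {
            platformPrivileges {
              manageUserCredentials
            }
          }
        }"""
    }
    resp = session.post(f"{FRONTEND}/api/v2/graphql", json=me_query)
    resp.raise_for_status()
    print(resp.json()["data"]["me"]["platformPrivileges"]["manageUserCredentials"])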
diff --git a/smoke-test/test_e2e.py b/smoke-test/test_e2e.py
index 1646f2896b6701..cde8fc616f60c1 100644
--- a/smoke-test/test_e2e.py
+++ b/smoke-test/test_e2e.py
@@ -865,7 +865,8 @@ def test_frontend_me_query(frontend_session):
           viewAnalytics
           managePolicies
           manageIdentities
-          generatePersonalAccessTokens
+          manageUserCredentials
+          generatePersonalAccessTokens
        }\n
      }\n
    }"""
@@ -880,6 +881,7 @@ def test_frontend_me_query(frontend_session):
     assert res_data["data"]["me"]["corpUser"]["urn"] == "urn:li:corpuser:datahub"
     assert res_data["data"]["me"]["platformPrivileges"]["viewAnalytics"] is True
     assert res_data["data"]["me"]["platformPrivileges"]["managePolicies"] is True
+    assert res_data["data"]["me"]["platformPrivileges"]["manageUserCredentials"] is True
     assert res_data["data"]["me"]["platformPrivileges"]["manageIdentities"] is True
     assert (
         res_data["data"]["me"]["platformPrivileges"]["generatePersonalAccessTokens"]
@@ -1445,3 +1447,159 @@ def test_generate_personal_access_token(frontend_session):
     assert res_data
     assert "errors" in res_data  # Assert the request fails
+
+@pytest.mark.dependency(depends=["test_healthchecks", "test_run_ingestion"])
+def test_native_user_endpoints(frontend_session):
+    # Sign up tests
+
+    # Test getting the invite token
+    get_invite_token_json = {
+        "query": """query getNativeUserInviteToken {\n
+            getNativeUserInviteToken{\n
+              inviteToken\n
+            }\n
+        }"""
+    }
+
+    get_invite_token_response = frontend_session.post(f"{FRONTEND_ENDPOINT}/api/v2/graphql", json=get_invite_token_json)
+    get_invite_token_response.raise_for_status()
+    get_invite_token_res_data = get_invite_token_response.json()
+
+    assert get_invite_token_res_data
+    assert get_invite_token_res_data["data"]
+    invite_token = get_invite_token_res_data["data"]["getNativeUserInviteToken"]["inviteToken"]
+    assert invite_token is not None
+    assert "error" not in get_invite_token_res_data
+
+
+    # Pass the invite token when creating the user
+    sign_up_json = {
+        "fullName": "Test User",
+        "email": "test@email.com",
+        "password": "password",
+        "title": "Data Engineer",
+        "inviteToken": invite_token
+    }
+
+    sign_up_response = frontend_session.post(f"{FRONTEND_ENDPOINT}/signUp", json=sign_up_json)
+    assert sign_up_response
+    assert "error" not in sign_up_response
+
+    # Creating the same user again fails
+    same_user_sign_up_response = frontend_session.post(f"{FRONTEND_ENDPOINT}/signUp", json=sign_up_json)
+    assert not same_user_sign_up_response
+
+    # Test that a bad invite token leads to failed sign up
+    bad_sign_up_json = {
+        "fullName": "Test2 User",
+        "email": "test2@email.com",
+        "password": "password",
+        "title": "Data Engineer",
+        "inviteToken": "invite_token"
+    }
+    bad_sign_up_response = frontend_session.post(f"{FRONTEND_ENDPOINT}/signUp", json=bad_sign_up_json)
+    assert not bad_sign_up_response
+
+    frontend_session.cookies.clear()
+
+
+    # Reset credentials tests
+
+    # Log in as root again
+    headers = {
+        "Content-Type": "application/json",
+    }
+    root_login_data = '{"username":"datahub", "password":"datahub"}'
+    frontend_session.post(f"{FRONTEND_ENDPOINT}/logIn", headers=headers, data=root_login_data)
+
+    # Test creating the password reset token
+    create_reset_token_json = {
+        "query": """mutation createNativeUserResetToken($input: CreateNativeUserResetTokenInput!) {\n
+            createNativeUserResetToken(input: $input) {\n
+              resetToken\n
+            }\n
+        }""",
+        "variables": {
+            "input": {
+                "userUrn": "urn:li:corpuser:test@email.com"
+            }
+        },
+    }
+
+    create_reset_token_response = frontend_session.post(f"{FRONTEND_ENDPOINT}/api/v2/graphql", json=create_reset_token_json)
+    create_reset_token_response.raise_for_status()
+    create_reset_token_res_data = create_reset_token_response.json()
+
+    assert create_reset_token_res_data
+    assert create_reset_token_res_data["data"]
+    reset_token = create_reset_token_res_data["data"]["createNativeUserResetToken"]["resetToken"]
+    assert reset_token is not None
+    assert "error" not in create_reset_token_res_data
+
+    # Pass the reset token when resetting credentials
+    reset_credentials_json = {
+        "email": "test@email.com",
+        "password": "password",
+        "resetToken": reset_token
+    }
+
+    reset_credentials_response = frontend_session.post(f"{FRONTEND_ENDPOINT}/resetNativeUserCredentials", json=reset_credentials_json)
+    assert reset_credentials_response
+    assert "error" not in reset_credentials_response
+
+    # Test that a bad reset token leads to failed response
+    bad_user_reset_credentials_json = {
+        "email": "test@email.com",
+        "password": "password",
+        "resetToken": "reset_token"
+    }
+    bad_reset_credentials_response = frontend_session.post(f"{FRONTEND_ENDPOINT}/resetNativeUserCredentials", json=bad_user_reset_credentials_json)
+    assert not bad_reset_credentials_response
+
+    # Test that only a native user can reset their password
+    jaas_user_reset_credentials_json = {
+        "email": "datahub",
+        "password": "password",
+        "resetToken": reset_token
+    }
+    jaas_user_reset_credentials_response = frontend_session.post(f"{FRONTEND_ENDPOINT}/resetNativeUserCredentials", json=jaas_user_reset_credentials_json)
+    assert not jaas_user_reset_credentials_response
+
+
+    # Tests that native users without the MANAGE_USER_CREDENTIALS privilege can't invite users or create password reset tokens
+
+    native_user_frontend_session = requests.Session()
+
+    native_user_login_data = '{"username":"test@email.com", "password":"password"}'
+    native_user_frontend_session.post(f"{FRONTEND_ENDPOINT}/logIn", headers=headers, data=native_user_login_data)
+
+    unauthenticated_get_invite_token_response = native_user_frontend_session.post(f"{FRONTEND_ENDPOINT}/api/v2/graphql", json=get_invite_token_json)
+    unauthenticated_get_invite_token_response.raise_for_status()
+    unauthenticated_get_invite_token_res_data = unauthenticated_get_invite_token_response.json()
+
+    assert unauthenticated_get_invite_token_res_data
+    assert "errors" in unauthenticated_get_invite_token_res_data
+    assert unauthenticated_get_invite_token_res_data["data"]
+    assert unauthenticated_get_invite_token_res_data["data"]["getNativeUserInviteToken"] is None
+
+    unauthenticated_create_reset_token_json = {
+        "query": """mutation createNativeUserResetToken($input: CreateNativeUserResetTokenInput!) {\n
+            createNativeUserResetToken(input: $input) {\n
+              resetToken\n
+            }\n
+        }""",
+        "variables": {
+            "input": {
+                "userUrn": "urn:li:corpuser:test@email.com"
+            }
+        },
+    }
+
+    unauthenticated_create_reset_token_response = native_user_frontend_session.post(f"{FRONTEND_ENDPOINT}/api/v2/graphql", json=unauthenticated_create_reset_token_json)
+    unauthenticated_create_reset_token_response.raise_for_status()
+    unauthenticated_create_reset_token_res_data = unauthenticated_create_reset_token_response.json()
+
+    assert unauthenticated_create_reset_token_res_data
+    assert "errors" in unauthenticated_create_reset_token_res_data
+    assert unauthenticated_create_reset_token_res_data["data"]
+    assert unauthenticated_create_reset_token_res_data["data"]["createNativeUserResetToken"] is None
diff --git a/smoke-test/tests/cypress/cypress/integration/home/home.js b/smoke-test/tests/cypress/cypress/integration/home/home.js
index 925fdc8fbb3898..9d6d4632d83947 100644
--- a/smoke-test/tests/cypress/cypress/integration/home/home.js
+++ b/smoke-test/tests/cypress/cypress/integration/home/home.js
@@ -3,6 +3,7 @@ describe('home', () => {
     cy.login();
     cy.visit('/');
     cy.wait(5000);
+    cy.get('img[src="/assets/platforms/datahublogo.png"]').should('exist');
    cy.get('[data-testid="entity-type-browse-card-DATASET"]').should('exist');
    cy.get('[data-testid="entity-type-browse-card-DASHBOARD"]').should('exist');
    cy.get('[data-testid="entity-type-browse-card-CHART"]').should('exist');
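Related, and purely optional: since the logoUrl default now points at /assets/platforms/datahublogo.png and the Cypress test above asserts the logo renders, a lightweight script-level check that the asset is reachable over HTTP could complement the browser test. This sketch assumes the frontend listens at localhost:9002 and that the asset is served without a prior login; depending on configuration the request may instead redirect to the login page.

    # Sketch only: confirm the default logo asset referenced by visualConfig is served.
    import requests

    FRONTEND = "http://localhost:9002"  # assumed frontend address

    resp = requests.get(f"{FRONTEND}/assets/platforms/datahublogo.png")
    assert resp.status_code == 200
    assert resp.headers.get("Content-Type", "").startswith("image/")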