Merge branch 'main' into add-remove-user-teams-api
# Conflicts:
#	openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/TeamResource.java
sonika-shah committed Nov 24, 2024
2 parents f103ac2 + b3d765d commit cd0f582
Showing 39 changed files with 1,168 additions and 248 deletions.
@@ -53,6 +53,7 @@ public PeriodicBatchEntityTrigger(

StartEvent startEvent =
new StartEventBuilder().id(getFlowableElementId(triggerWorkflowId, "startEvent")).build();
startEvent.setAsynchronousLeave(true);
oTimerDefinition.ifPresent(startEvent::addEventDefinition);
process.addFlowElement(startEvent);
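The only change in this trigger is the new setAsynchronousLeave(true) call on the start event. A minimal, self-contained sketch of that flag using the plain Flowable BPMN model API (not the OpenMetadata StartEventBuilder shown above); the reading that the engine then leaves the element asynchronously, handing downstream batch work to the async job executor instead of the triggering thread, is an assumption from the Flowable API, not stated in the commit:

import org.flowable.bpmn.model.Process;
import org.flowable.bpmn.model.StartEvent;

public class AsyncStartEventSketch {
  public static void main(String[] args) {
    Process process = new Process();
    process.setId("triggerWorkflow"); // illustrative id

    StartEvent startEvent = new StartEvent();
    startEvent.setId("startEvent");
    // Same flag the diff sets: leave the start event asynchronously so the
    // steps that follow run as async jobs rather than in the caller's thread.
    startEvent.setAsynchronousLeave(true);
    process.addFlowElement(startEvent);
  }
}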

@@ -64,7 +64,13 @@ private SearchClient.SearchResultListMapper fetchEntities(

try {
return searchRepository.listWithDeepPagination(
entityType, null, searchFilter, searchSortFilter, batchSize, searchAfter);
entityType,
null,
searchFilter,
new String[] {"fullyQualifiedName"},
searchSortFilter,
batchSize,
searchAfter);
} catch (IOException e) {
throw new RuntimeException(e);
}
@@ -60,6 +60,7 @@
import org.openmetadata.service.util.AsyncService;
import org.openmetadata.service.util.BulkAssetsOperationResponse;
import org.openmetadata.service.util.CSVExportResponse;
import org.openmetadata.service.util.CSVImportResponse;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.EntityUtil.Fields;
import org.openmetadata.service.util.RestUtil;
@@ -465,6 +466,28 @@ public Response bulkRemoveFromAssetsAsync(
return Response.ok().entity(response).type(MediaType.APPLICATION_JSON).build();
}

public Response importCsvInternalAsync(
SecurityContext securityContext, String name, String csv, boolean dryRun) {
OperationContext operationContext =
new OperationContext(entityType, MetadataOperation.EDIT_ALL);
authorizer.authorize(securityContext, operationContext, getResourceContextByName(name));
String jobId = UUID.randomUUID().toString();
ExecutorService executorService = AsyncService.getInstance().getExecutorService();
executorService.submit(
() -> {
try {
CsvImportResult result = importCsvInternal(securityContext, name, csv, dryRun);
WebsocketNotificationHandler.sendCsvImportCompleteNotification(
jobId, securityContext, result);
} catch (Exception e) {
WebsocketNotificationHandler.sendCsvImportFailedNotification(
jobId, securityContext, e.getMessage());
}
});
CSVImportResponse response = new CSVImportResponse(jobId, "Import initiated successfully.");
return Response.ok().entity(response).type(MediaType.APPLICATION_JSON).build();
}

public String exportCsvInternal(SecurityContext securityContext, String name) throws IOException {
OperationContext operationContext =
new OperationContext(entityType, MetadataOperation.VIEW_ALL);
@@ -505,6 +505,41 @@ public CsvImportResult importCsv(
return importCsvInternal(securityContext, name, csv, dryRun);
}

@PUT
@Path("/name/{name}/importAsync")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.APPLICATION_JSON)
@Valid
@Operation(
operationId = "importDatabaseAsync",
summary = "Import database schemas from CSV asynchronously",
description =
"Import database schemas from CSV to update database schemas asynchronously (no creation allowed).",
responses = {
@ApiResponse(
responseCode = "200",
description = "Import initiated successfully",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CsvImportResult.class)))
})
public Response importCsvAsync(
@Context SecurityContext securityContext,
@Parameter(description = "Name of the Database", schema = @Schema(type = "string"))
@PathParam("name")
String name,
@Parameter(
description =
"Dry-run when true is used for validating the CSV without really importing it. (default=true)",
schema = @Schema(type = "boolean"))
@DefaultValue("true")
@QueryParam("dryRun")
boolean dryRun,
String csv) {
return importCsvInternalAsync(securityContext, name, csv, dryRun);
}
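
Each resource gains the same fire-and-forget pattern: the endpoint authorizes, queues the import on the shared executor, and returns a CSVImportResponse immediately, while the final CsvImportResult is pushed through the websocket notification handler. A minimal client-side sketch of calling this database endpoint; the host, /api/v1/databases prefix, database fully qualified name, token, and the jobId/message field names in the acknowledgement are assumptions inferred from the code above, not confirmed by the commit:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ImportCsvAsyncClient {
  public static void main(String[] args) throws Exception {
    // Hypothetical deployment URL and database fully qualified name.
    String url =
        "http://localhost:8585/api/v1/databases/name/mysqlService.shopDb/importAsync?dryRun=false";
    String csv = "..."; // CSV content, e.g. as produced by the matching export endpoint

    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create(url))
        .header("Content-Type", "text/plain")
        .header("Authorization", "Bearer <token>") // auth scheme assumed
        .PUT(HttpRequest.BodyPublishers.ofString(csv))
        .build();

    // Returns immediately with something like
    // {"jobId":"<uuid>","message":"Import initiated successfully."}
    // (field names assumed from the CSVImportResponse constructor); the
    // CsvImportResult itself arrives later via the websocket notification.
    HttpResponse<String> response =
        HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.statusCode() + " " + response.body());
  }
}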

@PUT
@Path("/{id}/vote")
@Operation(
@@ -476,6 +476,40 @@ public CsvImportResult importCsv(
return importCsvInternal(securityContext, name, csv, dryRun);
}

@PUT
@Path("/name/{name}/importAsync")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.APPLICATION_JSON)
@Valid
@Operation(
operationId = "importDatabaseSchemaAsync",
summary =
"Import tables from CSV to update database schema asynchronously (no creation allowed)",
responses = {
@ApiResponse(
responseCode = "200",
description = "Import initiated successfully",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CsvImportResult.class)))
})
public Response importCsvAsync(
@Context SecurityContext securityContext,
@Parameter(description = "Name of the Database schema", schema = @Schema(type = "string"))
@PathParam("name")
String name,
@Parameter(
description =
"Dry-run when true is used for validating the CSV without really importing it. (default=true)",
schema = @Schema(type = "boolean"))
@DefaultValue("true")
@QueryParam("dryRun")
boolean dryRun,
String csv) {
return importCsvInternalAsync(securityContext, name, csv, dryRun);
}

@PUT
@Path("/{id}/vote")
@Operation(
@@ -534,6 +534,38 @@ public CsvImportResult importCsv(
return importCsvInternal(securityContext, name, csv, dryRun);
}

@PUT
@Path("/name/{name}/importAsync")
@Consumes(MediaType.TEXT_PLAIN)
@Valid
@Operation(
operationId = "importTableAsync",
summary = "Import columns from CSV to update table asynchronously (no creation allowed)",
responses = {
@ApiResponse(
responseCode = "200",
description = "Import result",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CsvImportResult.class)))
})
public Response importCsvAsync(
@Context SecurityContext securityContext,
@Parameter(description = "Name of the table", schema = @Schema(type = "string"))
@PathParam("name")
String name,
@Parameter(
description =
"Dry-run when true is used for validating the CSV without really importing it. (default=true)",
schema = @Schema(type = "boolean"))
@DefaultValue("true")
@QueryParam("dryRun")
boolean dryRun,
String csv) {
return importCsvInternalAsync(securityContext, name, csv, dryRun);
}

@DELETE
@Path("/{id}")
@Operation(
@@ -578,6 +578,36 @@ public CsvImportResult importCsv(
return importCsvInternal(securityContext, name, csv, dryRun);
}

@PUT
@Path("/name/{name}/importAsync")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.APPLICATION_JSON)
@Valid
@Operation(
operationId = "importGlossaryAsync",
summary = "Import glossary in CSV format asynchronously",
responses = {
@ApiResponse(
responseCode = "200",
description = "Import initiated successfully",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CsvImportResult.class)))
})
public Response importCsvAsync(
@Context SecurityContext securityContext,
@Parameter(description = "Name of the glossary", schema = @Schema(type = "string"))
@PathParam("name")
String name,
@RequestBody(description = "CSV data to import", required = true) String csv,
@Parameter(description = "Dry run the import", schema = @Schema(type = "boolean"))
@QueryParam("dryRun")
@DefaultValue("true")
boolean dryRun) {
return importCsvInternalAsync(securityContext, name, csv, dryRun);
}

private Glossary getGlossary(CreateGlossary create, String user) {
return getGlossary(repository, create, user);
}
@@ -523,6 +523,40 @@ public CsvImportResult importCsv(
return importCsvInternal(securityContext, name, csv, dryRun);
}

@PUT
@Path("/name/{name}/importAsync")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.APPLICATION_JSON)
@Valid
@Operation(
operationId = "importDatabaseServiceAsync",
summary =
"Import service from CSV to update database service asynchronously (no creation allowed)",
responses = {
@ApiResponse(
responseCode = "200",
description = "Import initiated successfully",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CsvImportResult.class)))
})
public Response importCsvAsync(
@Context SecurityContext securityContext,
@Parameter(description = "Name of the Database Service", schema = @Schema(type = "string"))
@PathParam("name")
String name,
@Parameter(
description =
"Dry-run when true is used for validating the CSV without really importing it. (default=true)",
schema = @Schema(type = "boolean"))
@DefaultValue("true")
@QueryParam("dryRun")
boolean dryRun,
String csv) {
return importCsvInternalAsync(securityContext, name, csv, dryRun);
}

@DELETE
@Path("/{id}")
@Operation(
@@ -731,6 +731,37 @@ public Response deleteTeamUser(
.toResponse();
}

@PUT
@Path("/name/{name}/importAsync")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.APPLICATION_JSON)
@Valid
@Operation(
operationId = "importTeamsAsync",
summary = "Import from CSV to create, and update teams asynchronously.",
responses = {
@ApiResponse(
responseCode = "200",
description = "Import initiated successfully",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CsvImportResult.class)))
})
public Response importCsvAsync(
@Context SecurityContext securityContext,
@PathParam("name") String name,
@Parameter(
description =
"Dry-run when true is used for validating the CSV without really importing it. (default=true)",
schema = @Schema(type = "boolean"))
@DefaultValue("true")
@QueryParam("dryRun")
boolean dryRun,
String csv) {
return importCsvInternalAsync(securityContext, name, csv, dryRun);
}

private Team getTeam(CreateTeam ct, String user) {
if (ct.getTeamType().equals(TeamType.ORGANIZATION)) {
throw new IllegalArgumentException(CREATE_ORGANIZATION);
@@ -1482,6 +1482,41 @@ public CsvImportResult importCsv(
return importCsvInternal(securityContext, team, csv, dryRun);
}

@PUT
@Path("/importAsync")
@Consumes(MediaType.TEXT_PLAIN)
@Valid
@Operation(
operationId = "importTeamsAsync",
summary = "Import from CSV to create, and update teams asynchronously.",
responses = {
@ApiResponse(
responseCode = "200",
description = "Import result",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CsvImportResult.class)))
})
public Response importCsvAsync(
@Context SecurityContext securityContext,
@Parameter(
description = "Name of the team to under which the users are imported to",
required = true,
schema = @Schema(type = "string"))
@QueryParam("team")
String team,
@Parameter(
description =
"Dry-run when true is used for validating the CSV without really importing it. (default=true)",
schema = @Schema(type = "boolean"))
@DefaultValue("true")
@QueryParam("dryRun")
boolean dryRun,
String csv) {
return importCsvInternalAsync(securityContext, team, csv, dryRun);
}

public void validateEmailAlreadyExists(String email) {
if (repository.checkEmailAlreadyExists(email)) {
throw new CustomExceptionMessage(
@@ -174,6 +174,7 @@ SearchResultListMapper listWithDeepPagination(
String index,
String query,
String filter,
String[] fields,
SearchSortFilter searchSortFilter,
int size,
Object[] searchAfter)
@@ -841,13 +841,20 @@ public SearchClient.SearchResultListMapper listWithDeepPagination(
String entityType,
String query,
String filter,
String[] fields,
SearchSortFilter searchSortFilter,
int size,
Object[] searchAfter)
throws IOException {
IndexMapping index = entityIndexMap.get(entityType);
return searchClient.listWithDeepPagination(
index.getIndexName(clusterAlias), query, filter, searchSortFilter, size, searchAfter);
index.getIndexName(clusterAlias),
query,
filter,
fields,
searchSortFilter,
size,
searchAfter);
}

public Response searchBySourceUrl(String sourceUrl) throws IOException {
@@ -700,6 +700,7 @@ public SearchResultListMapper listWithDeepPagination(
String index,
String query,
String filter,
String[] fields,
SearchSortFilter searchSortFilter,
int size,
Object[] searchAfter)
@@ -710,6 +711,10 @@ public SearchResultListMapper listWithDeepPagination(
if (!nullOrEmpty(query)) {
searchSourceBuilder = getSearchSourceBuilder(index, query, 0, size);
}
if (!nullOrEmpty(fields)) {
searchSourceBuilder.fetchSource(fields, null);
}

if (Optional.ofNullable(filter).isPresent()) {
getSearchFilter(filter, searchSourceBuilder, !nullOrEmpty(query));
}
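
The new fields argument ends up in SearchSourceBuilder.fetchSource(includes, excludes), i.e. standard Elasticsearch/OpenSearch source filtering, so deep-pagination callers such as fetchEntities above can retrieve only fullyQualifiedName instead of full documents. A standalone sketch of that builder behaviour; the query, page size, and sort field are illustrative, and the Elasticsearch packages are used here while the OpenSearch variant differs only in package names:

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;

public class SourceFilteringSketch {
  public static void main(String[] args) {
    SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
    searchSourceBuilder.query(QueryBuilders.matchAllQuery());
    searchSourceBuilder.size(100);

    // Equivalent of the new branch above: include only the listed fields in _source.
    String[] fields = {"fullyQualifiedName"};
    searchSourceBuilder.fetchSource(fields, null);

    // search_after based deep pagination needs a stable sort; the field name is illustrative.
    searchSourceBuilder.sort("fullyQualifiedName.keyword", SortOrder.ASC);

    // Printing the builder shows the JSON request body, including the "_source" filter.
    System.out.println(searchSourceBuilder);
  }
}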