Merge branch 'datahub-project:master' into master
hsheth2 authored Feb 26, 2024
2 parents 8846e33 + a1f2216 commit 0771f11
Showing 30 changed files with 6,036 additions and 16 deletions.
8 changes: 7 additions & 1 deletion .github/workflows/pr-labeler.yml
@@ -37,7 +37,13 @@ jobs:
"maggiehays",
"mrjefflewis",
"pedro93",
"RyanHolstien"
"RyanHolstien",
"Kunal-kankriya",
"purnimagarg1",
"gaurav2733",
"dushayntAW",
"AvaniSiddhapuraAPT",
"akarsh991"
]'),
github.actor
)
@@ -5,17 +5,25 @@
import com.datahub.authorization.PolicyFetcher;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.exception.AuthorizationException;
import com.linkedin.datahub.graphql.generated.AndFilterInput;
import com.linkedin.datahub.graphql.generated.FacetFilterInput;
import com.linkedin.datahub.graphql.generated.ListPoliciesInput;
import com.linkedin.datahub.graphql.generated.ListPoliciesResult;
import com.linkedin.datahub.graphql.generated.Policy;
import com.linkedin.datahub.graphql.resolvers.ResolverUtils;
import com.linkedin.datahub.graphql.resolvers.policy.mappers.PolicyInfoPolicyMapper;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.query.filter.Filter;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;

@Slf4j
public class ListPoliciesResolver implements DataFetcher<CompletableFuture<ListPoliciesResult>> {

private static final Integer DEFAULT_START = 0;
@@ -40,9 +48,20 @@ public CompletableFuture<ListPoliciesResult> get(final DataFetchingEnvironment e
final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart();
final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount();
final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery();
final List<AndFilterInput> filters =
input.getOrFilters() != null ? input.getOrFilters() : new ArrayList<>();
final List<FacetFilterInput> facetFilters =
filters.stream()
.map(AndFilterInput::getAnd)
.flatMap(List::stream)
.collect(Collectors.toList());
log.debug(
"User {} listing policies with filters {}", context.getActorUrn(), filters.toString());

final Filter filter = ResolverUtils.buildFilter(facetFilters, Collections.emptyList());

return _policyFetcher
.fetchPolicies(start, query, count, context.getAuthentication())
.fetchPolicies(start, query, count, filter, context.getAuthentication())
.thenApply(
policyFetchResult -> {
final ListPoliciesResult result = new ListPoliciesResult();
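For illustration, here is a minimal, self-contained sketch of the flattening the new resolver code performs: every conjunct of every AndFilterInput group is collected into one flat list before being handed to ResolverUtils.buildFilter. The no-arg constructors and setters are assumed from DataHub's generated GraphQL input classes; the field names and values are hypothetical.

import com.linkedin.datahub.graphql.generated.AndFilterInput;
import com.linkedin.datahub.graphql.generated.FacetFilterInput;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class FilterFlatteningSketch {
  public static void main(String[] args) {
    // Hypothetical facet filters; "state" and "type" are made-up field names.
    FacetFilterInput byState = new FacetFilterInput();
    byState.setField("state");
    byState.setValues(List.of("ACTIVE"));

    FacetFilterInput byType = new FacetFilterInput();
    byType.setField("type");
    byType.setValues(List.of("METADATA"));

    // Two disjunctive groups, each wrapping its own conjuncts.
    AndFilterInput group1 = new AndFilterInput();
    group1.setAnd(List.of(byState));
    AndFilterInput group2 = new AndFilterInput();
    group2.setAnd(List.of(byType));

    // The same stream pipeline as the resolver: flatten all groups
    // into a single list of facet filters.
    List<FacetFilterInput> facetFilters =
        Stream.of(group1, group2)
            .map(AndFilterInput::getAnd)
            .flatMap(List::stream)
            .collect(Collectors.toList());

    System.out.println(facetFilters.size()); // prints 2
  }
}

Note that the resolver then passes this flattened list, together with an empty list, to buildFilter, so the per-group AND structure appears to be collapsed into a single conjunction at that point.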
5 changes: 5 additions & 0 deletions datahub-graphql-core/src/main/resources/entity.graphql
@@ -8682,6 +8682,11 @@ input ListPoliciesInput {
Optional search query
"""
query: String

"""
A list of disjunctive criteria for the filter (an OR operation is used to combine the filters)
"""
orFilters: [AndFilterInput!]
}

"""
Expand Down
Original file line number Diff line number Diff line change
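The "disjunctive criteria" wording means the outer list is combined with OR while the entries inside each AndFilterInput are combined with AND. A small stand-alone sketch of those semantics, using plain strings in place of the generated input types:

import java.util.List;
import java.util.Map;

public class OrOfAndsSemantics {
  // A policy matches when at least one group has all of its criteria
  // satisfied: OR across groups, AND within a group.
  static boolean matches(List<List<String>> orFilters, Map<String, Boolean> facts) {
    return orFilters.stream()
        .anyMatch(group -> group.stream().allMatch(c -> facts.getOrDefault(c, false)));
  }

  public static void main(String[] args) {
    List<List<String>> orFilters =
        List.of(
            List.of("editable", "active"), // group 1: editable AND active
            List.of("system"));            // group 2: system
    System.out.println(matches(orFilters, Map.of("system", true)));   // true
    System.out.println(matches(orFilters, Map.of("editable", true))); // false; group 1 also needs "active"
  }
}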
@@ -0,0 +1,23 @@
package com.linkedin.datahub.upgrade.config;

import com.linkedin.datahub.upgrade.system.entity.steps.BackfillPolicyFields;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.search.SearchService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class BackfillPolicyFieldsConfig {

@Bean
public BackfillPolicyFields backfillPolicyFields(
EntityService<?> entityService,
SearchService searchService,
@Value("${systemUpdate.policyFields.enabled}") final boolean enabled,
@Value("${systemUpdate.policyFields.reprocess.enabled}") final boolean reprocessEnabled,
@Value("${systemUpdate.policyFields.batchSize}") final Integer batchSize) {
return new BackfillPolicyFields(
entityService, searchService, enabled, reprocessEnabled, batchSize);
}
}
@@ -4,6 +4,7 @@
import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices;
import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices;
import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2;
import com.linkedin.datahub.upgrade.system.entity.steps.BackfillPolicyFields;
import com.linkedin.datahub.upgrade.system.via.ReindexDataJobViaNodesCLL;
import com.linkedin.gms.factory.common.TopicConventionFactory;
import com.linkedin.gms.factory.config.ConfigurationProvider;
@@ -40,7 +41,8 @@ public SystemUpdate systemUpdate(
final GitVersion gitVersion,
@Qualifier("revision") String revision,
final BackfillBrowsePathsV2 backfillBrowsePathsV2,
final ReindexDataJobViaNodesCLL reindexDataJobViaNodesCLL) {
final ReindexDataJobViaNodesCLL reindexDataJobViaNodesCLL,
final BackfillPolicyFields backfillPolicyFields) {

String version = String.format("%s-%s", gitVersion.getVersion(), revision);
return new SystemUpdate(
@@ -49,7 +51,8 @@
kafkaEventProducer,
version,
backfillBrowsePathsV2,
reindexDataJobViaNodesCLL);
reindexDataJobViaNodesCLL,
backfillPolicyFields);
}

@Value("#{systemEnvironment['DATAHUB_REVISION'] ?: '0'}")
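The SpEL expression above falls back to "0" whenever the DATAHUB_REVISION environment variable is unset. A rough plain-Java equivalent of that lookup:

import java.util.Optional;

public class RevisionDefaultSketch {
  public static void main(String[] args) {
    // Same effect as #{systemEnvironment['DATAHUB_REVISION'] ?: '0'}:
    // use the environment value when present, otherwise "0".
    String revision = Optional.ofNullable(System.getenv("DATAHUB_REVISION")).orElse("0");
    System.out.println(revision);
  }
}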
@@ -7,6 +7,7 @@
import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices;
import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep;
import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2;
import com.linkedin.datahub.upgrade.system.entity.steps.BackfillPolicyFields;
import com.linkedin.datahub.upgrade.system.via.ReindexDataJobViaNodesCLL;
import com.linkedin.metadata.dao.producer.KafkaEventProducer;
import java.util.List;
@@ -26,11 +27,13 @@ public SystemUpdate(
final KafkaEventProducer kafkaEventProducer,
final String version,
final BackfillBrowsePathsV2 backfillBrowsePathsV2,
final ReindexDataJobViaNodesCLL upgradeViaNodeCll) {
final ReindexDataJobViaNodesCLL upgradeViaNodeCll,
final BackfillPolicyFields backfillPolicyFields) {

_preStartupUpgrades = List.of(buildIndicesJob);
_steps = List.of(new DataHubStartupStep(kafkaEventProducer, version));
_postStartupUpgrades = List.of(cleanIndicesJob, backfillBrowsePathsV2, upgradeViaNodeCll);
_postStartupUpgrades =
List.of(cleanIndicesJob, backfillBrowsePathsV2, upgradeViaNodeCll, backfillPolicyFields);
}

@Override
@@ -0,0 +1,38 @@
package com.linkedin.datahub.upgrade.system.entity.steps;

import com.google.common.collect.ImmutableList;
import com.linkedin.datahub.upgrade.Upgrade;
import com.linkedin.datahub.upgrade.UpgradeStep;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.search.SearchService;
import java.util.List;

public class BackfillPolicyFields implements Upgrade {
private final List<UpgradeStep> _steps;

public BackfillPolicyFields(
EntityService<?> entityService,
SearchService searchService,
boolean enabled,
boolean reprocessEnabled,
Integer batchSize) {
if (enabled) {
_steps =
ImmutableList.of(
new BackfillPolicyFieldsStep(
entityService, searchService, reprocessEnabled, batchSize));
} else {
_steps = ImmutableList.of();
}
}

@Override
public String id() {
return "BackfillPolicyFields";
}

@Override
public List<UpgradeStep> steps() {
return _steps;
}
}
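Because the step list is fixed in the constructor, a disabled instance exposes no steps at all and never touches the injected services. A minimal sketch of that behavior; passing nulls is safe here only because the disabled branch ignores them, and the batch size of 1000 is arbitrary:

import com.linkedin.datahub.upgrade.system.entity.steps.BackfillPolicyFields;

public class BackfillPolicyFieldsDisabledSketch {
  public static void main(String[] args) {
    // enabled = false, so no BackfillPolicyFieldsStep is constructed.
    BackfillPolicyFields disabled = new BackfillPolicyFields(null, null, false, false, 1000);
    System.out.println(disabled.id());              // BackfillPolicyFields
    System.out.println(disabled.steps().isEmpty()); // true
  }
}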