
Commit

Merge branch 'datahub-project:master' into master
hsheth2 authored Jan 4, 2024
2 parents 3738195 + 2268c0c commit a593883
Showing 121 changed files with 4,546 additions and 10,837 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/docker-unified.yml
@@ -4,12 +4,14 @@ on:
branches:
- master
paths-ignore:
- "docs-website/**"
- "docs/**"
- "**.md"
pull_request:
branches:
- "**"
paths-ignore:
- "docs-website/**"
- "docs/**"
- "**.md"
release:
3 changes: 0 additions & 3 deletions .github/workflows/metadata-io.yml
@@ -40,9 +40,6 @@ jobs:
python-version: "3.10"
cache: "pip"
- name: Gradle build (and test)
# there is a race condition in the gradle build which makes gradle never terminate in ~30% of the runs
# running the build first without datahub-web-react:yarnBuild and then with it is 100% stable
# datahub-frontend:unzipAssets depends on datahub-web-react:yarnBuild but gradle does not know about it
run: |
./gradlew :metadata-io:test
- uses: actions/upload-artifact@v3
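The removed comment points at the root cause: datahub-frontend:unzipAssets consumes the output of datahub-web-react:yarnBuild without declaring it, so parallel Gradle runs can race. A minimal Gradle sketch of the explicit wiring that would make the ordering deterministic (task and project names taken from the comment; this is an illustration, not part of this commit):

// In datahub-frontend/build.gradle — hypothetical sketch, not in this commit.
// Declaring the inter-project task dependency lets Gradle order the tasks
// instead of leaving unzipAssets to race against yarnBuild.
tasks.named('unzipAssets') {
    dependsOn ':datahub-web-react:yarnBuild'
}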
28 changes: 28 additions & 0 deletions .github/workflows/pr-labeler.yml
@@ -14,3 +14,31 @@ jobs:
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
configuration-path: ".github/pr-labeler-config.yml"
- uses: actions-ecosystem/action-add-labels@v1
# only add names of Acryl Data team members here
if:
${{
!contains(
fromJson('[
"skrydal",
"siladitya2",
"sgomezvillamor",
"ngamanda",
"HarveyLeo",
"frsann",
"bossenti",
"nikolakasev",
"PatrickfBraz",
"cuong-pham",
"sudhakarast",
"tkdrahn",
"rtekal",
"sgm44"
]'),
github.actor
)
}}
with:
github_token: ${{ github.token }}
labels: |
datahub-community-champion
9 changes: 7 additions & 2 deletions .github/workflows/spark-smoke-test.yml
@@ -51,10 +51,15 @@ jobs:
-x :datahub-web-react:yarnLint \
-x :datahub-web-react:yarnGenerate \
-x :datahub-web-react:yarnInstall \
-x :datahub-web-react:yarnQuickBuild \
-x :datahub-web-react:copyAssets \
-x :datahub-web-react:yarnBuild \
-x :datahub-web-react:distZip \
-x :datahub-web-react:jar
- name: Upload logs
uses: actions/upload-artifact@v3
if: failure()
with:
name: docker logs
path: "docker/build/container-logs/*.log"
- uses: actions/upload-artifact@v3
if: always()
with:
4 changes: 4 additions & 0 deletions build.gradle
@@ -325,6 +325,10 @@ subprojects {
}

plugins.withType(JavaPlugin).configureEach {
if (project.name == 'datahub-web-react') {
return
}

dependencies {
implementation externalDependency.annotationApi
constraints {
18 changes: 0 additions & 18 deletions datahub-frontend/build.gradle
@@ -1,5 +1,4 @@
plugins {
id "io.github.kobylynskyi.graphql.codegen" version "4.1.1"
id 'scala'
id 'com.palantir.docker'
id 'org.gradle.playframework'
@@ -39,23 +38,6 @@ artifacts {
archives myTar
}

graphqlCodegen {
// For options: https://github.com/kobylynskyi/graphql-java-codegen/blob/master/docs/codegen-options.md
graphqlSchemaPaths = ["$projectDir/conf/datahub-frontend.graphql".toString()]
outputDir = new File("$projectDir/app/graphql")
packageName = "generated"
generateApis = true
modelValidationAnnotation = ""
customTypesMapping = [
Long: "Long",
]
}

tasks.withType(Checkstyle) {
exclude "**/generated/**"
}


/*
PLAY UPGRADE NOTE
Generates the distribution jars under the expected names. The playFramework plugin only accepts certain name values
11 changes: 7 additions & 4 deletions datahub-frontend/conf/routes
@@ -36,11 +36,14 @@ PUT /openapi/*path controllers.Application.proxy(path: String, request: Request)
HEAD /openapi/*path controllers.Application.proxy(path: String, request: Request)
PATCH /openapi/*path controllers.Application.proxy(path: String, request: Request)

# Map static resources from the /public folder to the /assets URL path
GET /assets/*file controllers.Assets.at(path="/public", file)

# Analytics route
POST /track controllers.TrackingController.track(request: Request)

# Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle
# Known React asset routes
GET /assets/*file controllers.Assets.at(path="/public/assets", file)
GET /node_modules/*file controllers.Assets.at(path="/public/node_modules", file)
GET /manifest.json controllers.Assets.at(path="/public", file="manifest.json")
GET /robots.txt controllers.Assets.at(path="/public", file="robots.txt")

# Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle's index.html
GET /*path controllers.Application.index(path)
BatchGetEntitiesResolver.java
@@ -3,16 +3,20 @@
import com.linkedin.datahub.graphql.generated.Entity;
import com.linkedin.datahub.graphql.generated.EntityType;
import com.linkedin.datahub.graphql.resolvers.BatchLoadUtils;
import graphql.execution.DataFetcherResult;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;

@Slf4j
public class BatchGetEntitiesResolver implements DataFetcher<CompletableFuture<List<Entity>>> {

private final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> _entityTypes;
@@ -30,13 +34,21 @@ public CompletableFuture<List<Entity>> get(DataFetchingEnvironment environment)
final List<Entity> entities = _entitiesProvider.apply(environment);
Map<EntityType, List<Entity>> entityTypeToEntities = new HashMap<>();

entities.forEach(
(entity) -> {
EntityType type = entity.getType();
List<Entity> entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>());
entitiesList.add(entity);
entityTypeToEntities.put(type, entitiesList);
});
Map<String, List<Integer>> entityIndexMap = new HashMap<>();
int index = 0;
for (Entity entity : entities) {
List<Integer> indexList = new ArrayList<>();
if (entityIndexMap.containsKey(entity.getUrn())) {
indexList = entityIndexMap.get(entity.getUrn());
}
indexList.add(index);
entityIndexMap.put(entity.getUrn(), indexList);
index++;
EntityType type = entity.getType();
List<Entity> entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>());
entitiesList.add(entity);
entityTypeToEntities.put(type, entitiesList);
}

List<CompletableFuture<List<Entity>>> entitiesFutures = new ArrayList<>();

@@ -49,9 +61,32 @@ public CompletableFuture<List<Entity>> get(DataFetchingEnvironment environment)

return CompletableFuture.allOf(entitiesFutures.toArray(new CompletableFuture[0]))
.thenApply(
v ->
entitiesFutures.stream()
.flatMap(future -> future.join().stream())
.collect(Collectors.toList()));
v -> {
Entity[] finalEntityList = new Entity[entities.size()];
// Returned objects can be either of type Entity or wrapped as
// DataFetcherResult<Entity>
// Therefore we need to be working with raw Objects in this area of the code
List<Object> returnedList =
entitiesFutures.stream()
.flatMap(future -> future.join().stream())
.collect(Collectors.toList());
for (Object element : returnedList) {
Entity entity = null;
if (element instanceof DataFetcherResult) {
entity = ((DataFetcherResult<Entity>) element).getData();
} else if (element instanceof Entity) {
entity = (Entity) element;
} else {
throw new RuntimeException(
String.format(
"Cannot process entity because it is neither an Entity not a DataFetcherResult. %s",
element));
}
for (int idx : entityIndexMap.get(entity.getUrn())) {
finalEntityList[idx] = entity;
}
}
return Arrays.asList(finalEntityList);
});
}
}
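
Taken together, the resolver change replaces plain concatenation of the per-type futures with index-based stitching: every position at which a urn was requested is recorded up front, and each returned entity is written back into all of its slots. A self-contained Java sketch of why that matters, using String stand-ins for Entity and hypothetical urns:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

public class OrderingSketch {
  public static void main(String[] args) {
    // Request order: dataset:1, chart:1, dataset:2 — but batching groups by
    // type, so the per-type futures return [dataset:1, dataset:2] and [chart:1].
    List<String> requested = List.of("urn:li:dataset:1", "urn:li:chart:1", "urn:li:dataset:2");
    List<CompletableFuture<List<String>>> futures = List.of(
        CompletableFuture.completedFuture(List.of("urn:li:dataset:1", "urn:li:dataset:2")),
        CompletableFuture.completedFuture(List.of("urn:li:chart:1")));

    // Old behavior: concatenation yields [dataset:1, dataset:2, chart:1],
    // which no longer lines up with the caller's request order.
    List<String> concatenated = futures.stream()
        .flatMap(f -> f.join().stream())
        .collect(Collectors.toList());
    System.out.println(concatenated);

    // New behavior: record every index at which each urn was requested
    // (a list of indices, so duplicate urns are handled too), then place
    // each response into all of its slots.
    Map<String, List<Integer>> indexMap = new HashMap<>();
    for (int i = 0; i < requested.size(); i++) {
      indexMap.computeIfAbsent(requested.get(i), k -> new ArrayList<>()).add(i);
    }
    String[] stitched = new String[requested.size()];
    for (String urn : concatenated) {
      for (int idx : indexMap.get(urn)) {
        stitched[idx] = urn;
      }
    }
    System.out.println(Arrays.asList(stitched)); // matches the request order
  }
}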
BatchGetEntitiesResolverTest.java
@@ -0,0 +1,117 @@
package com.linkedin.datahub.graphql.resolvers.load;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
import static org.testng.Assert.*;

import com.google.common.collect.ImmutableList;
import com.linkedin.datahub.graphql.generated.Dashboard;
import com.linkedin.datahub.graphql.generated.Dataset;
import com.linkedin.datahub.graphql.generated.Entity;
import com.linkedin.datahub.graphql.types.dataset.DatasetType;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.entity.EntityService;
import graphql.schema.DataFetchingEnvironment;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderRegistry;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

public class BatchGetEntitiesResolverTest {
private EntityClient _entityClient;
private EntityService _entityService;
private DataFetchingEnvironment _dataFetchingEnvironment;

@BeforeMethod
public void setupTest() {
_entityService = mock(EntityService.class);
_dataFetchingEnvironment = mock(DataFetchingEnvironment.class);
_entityClient = mock(EntityClient.class);
}

List<Entity> getRequestEntities(List<String> urnList) {

return urnList.stream()
.map(
urn -> {
if (urn.startsWith("urn:li:dataset")) {
Dataset entity = new Dataset();
entity.setUrn(urn);
return entity;
} else if (urn.startsWith("urn:li:dashboard")) {
Dashboard entity = new Dashboard();
entity.setUrn(urn);
return entity;
} else {
throw new RuntimeException("Can't handle urn " + urn);
}
})
.collect(Collectors.toList());
}

@Test
/** Tests that if responses come back out of order, we stitch them back correctly */
public void testReordering() throws Exception {
Function entityProvider = mock(Function.class);
List<Entity> inputEntities =
getRequestEntities(ImmutableList.of("urn:li:dataset:1", "urn:li:dataset:2"));
when(entityProvider.apply(any())).thenReturn(inputEntities);
BatchGetEntitiesResolver resolver =
new BatchGetEntitiesResolver(
ImmutableList.of(new DatasetType(_entityClient)), entityProvider);

DataLoaderRegistry mockDataLoaderRegistry = mock(DataLoaderRegistry.class);
when(_dataFetchingEnvironment.getDataLoaderRegistry()).thenReturn(mockDataLoaderRegistry);
DataLoader mockDataLoader = mock(DataLoader.class);
when(mockDataLoaderRegistry.getDataLoader(any())).thenReturn(mockDataLoader);

Dataset mockResponseEntity1 = new Dataset();
mockResponseEntity1.setUrn("urn:li:dataset:1");

Dataset mockResponseEntity2 = new Dataset();
mockResponseEntity2.setUrn("urn:li:dataset:2");

CompletableFuture mockFuture =
CompletableFuture.completedFuture(
ImmutableList.of(mockResponseEntity2, mockResponseEntity1));
when(mockDataLoader.loadMany(any())).thenReturn(mockFuture);
when(_entityService.exists(any())).thenReturn(true);
List<Entity> batchGetResponse = resolver.get(_dataFetchingEnvironment).join();
assertEquals(batchGetResponse.size(), 2);
assertEquals(batchGetResponse.get(0), mockResponseEntity1);
assertEquals(batchGetResponse.get(1), mockResponseEntity2);
}

@Test
/** Tests that if input list contains duplicates, we stitch them back correctly */
public void testDuplicateUrns() throws Exception {
Function entityProvider = mock(Function.class);
List<Entity> inputEntities =
getRequestEntities(ImmutableList.of("urn:li:dataset:foo", "urn:li:dataset:foo"));
when(entityProvider.apply(any())).thenReturn(inputEntities);
BatchGetEntitiesResolver resolver =
new BatchGetEntitiesResolver(
ImmutableList.of(new DatasetType(_entityClient)), entityProvider);

DataLoaderRegistry mockDataLoaderRegistry = mock(DataLoaderRegistry.class);
when(_dataFetchingEnvironment.getDataLoaderRegistry()).thenReturn(mockDataLoaderRegistry);
DataLoader mockDataLoader = mock(DataLoader.class);
when(mockDataLoaderRegistry.getDataLoader(any())).thenReturn(mockDataLoader);

Dataset mockResponseEntity = new Dataset();
mockResponseEntity.setUrn("urn:li:dataset:foo");

CompletableFuture mockFuture =
CompletableFuture.completedFuture(ImmutableList.of(mockResponseEntity));
when(mockDataLoader.loadMany(any())).thenReturn(mockFuture);
when(_entityService.exists(any())).thenReturn(true);
List<Entity> batchGetResponse = resolver.get(_dataFetchingEnvironment).join();
assertEquals(batchGetResponse.size(), 2);
assertEquals(batchGetResponse.get(0), mockResponseEntity);
assertEquals(batchGetResponse.get(1), mockResponseEntity);
}
}
4 changes: 1 addition & 3 deletions datahub-web-react/.env
@@ -1,5 +1,3 @@
PUBLIC_URL=/assets
REACT_APP_THEME_CONFIG=theme_light.config.json
SKIP_PREFLIGHT_CHECK=true
BUILD_PATH=build/yarn
REACT_APP_PROXY_TARGET=http://localhost:9002
REACT_APP_PROXY_TARGET=http://localhost:9002
3 changes: 2 additions & 1 deletion datahub-web-react/.eslintrc.js
@@ -5,7 +5,7 @@ module.exports = {
'airbnb-typescript',
'airbnb/hooks',
'plugin:@typescript-eslint/recommended',
'plugin:jest/recommended',
'plugin:vitest/recommended',
'prettier',
],
plugins: ['@typescript-eslint'],
@@ -46,6 +46,7 @@ module.exports = {
argsIgnorePattern: '^_',
},
],
'vitest/prefer-to-be': 'off',
},
settings: {
react: {
