Merge branch 'master' into airflow_disable
treff7es authored Dec 12, 2024
2 parents f5f47e3 + 7339848 commit d5279ee
Showing 501 changed files with 138,285 additions and 2,490 deletions.
20 changes: 6 additions & 14 deletions .github/workflows/airflow-plugin.yml
@@ -34,29 +34,21 @@ jobs:
include:
# Note: this should be kept in sync with tox.ini.
- python-version: "3.8"
extra_pip_requirements: "apache-airflow~=2.1.4"
extra_pip_extras: plugin-v1
- python-version: "3.8"
extra_pip_requirements: "apache-airflow~=2.2.4"
extra_pip_extras: plugin-v1
extra_pip_requirements: "apache-airflow~=2.3.4"
extra_pip_extras: test-airflow23
- python-version: "3.10"
extra_pip_requirements: "apache-airflow~=2.4.3"
extra_pip_extras: plugin-v2,test-airflow24
extra_pip_extras: test-airflow24
- python-version: "3.10"
extra_pip_requirements: "apache-airflow~=2.6.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-3.10.txt"
extra_pip_extras: plugin-v2
- python-version: "3.10"
extra_pip_requirements: "apache-airflow~=2.7.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.7.3/constraints-3.10.txt"
extra_pip_extras: plugin-v2
- python-version: "3.10"
extra_pip_requirements: "apache-airflow~=2.8.1 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.8.1/constraints-3.10.txt"
extra_pip_extras: plugin-v2
- python-version: "3.11"
extra_pip_requirements: "apache-airflow~=2.9.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.9.3/constraints-3.11.txt"
extra_pip_extras: plugin-v2
- python-version: "3.11"
extra_pip_requirements: "apache-airflow~=2.10.2 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.11.txt"
extra_pip_extras: plugin-v2
extra_pip_requirements: "apache-airflow~=2.10.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.10.3/constraints-3.11.txt"
fail-fast: false
steps:
- name: Set up JDK 17
@@ -88,10 +80,10 @@ jobs:
!**/binary/**
- name: Upload coverage to Codecov
if: always()
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: .
directory: ./build/coverage-reports/
fail_ci_if_error: false
flags: airflow,airflow-${{ matrix.extra_pip_extras }}
name: pytest-airflow-${{ matrix.python-version }}-${{ matrix.extra_pip_requirements }}
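For context, each matrix entry above is consumed as a pip requirement spec plus optional extras when the plugin's test environment is built — per the in-file comment, the authoritative list lives in tox.ini. A minimal sketch of how such an entry could be consumed directly, purely illustrative and not part of this commit (the step name is an assumption):

      # Illustrative only: install the Airflow pin selected by the matrix entry.
      - name: Install Airflow for this matrix entry (sketch)
        run: pip install ${{ matrix.extra_pip_requirements }}
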
10 changes: 10 additions & 0 deletions .github/workflows/build-and-test.yml
@@ -126,6 +126,16 @@ jobs:
!**/binary/**
- name: Ensure codegen is updated
uses: ./.github/actions/ensure-codegen-updated
- name: Upload coverage to Codecov
if: always()
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./build/coverage-reports/
fail_ci_if_error: false
flags: ${{ matrix.timezone }}
name: ${{ matrix.command }}
verbose: true

quickstart-compose-validation:
runs-on: ubuntu-latest
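The Codecov steps added or updated in this commit upload from ./build/coverage-reports/ rather than the repository root, which assumes an earlier step has already written coverage reports there. A minimal sketch of such a step, purely illustrative (the Gradle task names and the report location are assumptions based on the java-coverage.gradle script applied elsewhere in this commit, not something shown in this diff):

      # Illustrative only: produce JaCoCo coverage reports before the upload step.
      - name: Run tests with coverage (sketch)
        run: ./gradlew test jacocoTestReport
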
4 changes: 2 additions & 2 deletions .github/workflows/dagster-plugin.yml
@@ -66,10 +66,10 @@ jobs:
**/junit.*.xml
- name: Upload coverage to Codecov
if: always()
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: .
directory: ./build/coverage-reports/
fail_ci_if_error: false
flags: dagster-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }}
name: pytest-dagster
4 changes: 2 additions & 2 deletions .github/workflows/gx-plugin.yml
@@ -70,10 +70,10 @@ jobs:
**/junit.*.xml
- name: Upload coverage to Codecov
if: always()
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: .
directory: ./build/coverage-reports/
fail_ci_if_error: false
flags: gx-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }}
name: pytest-gx
4 changes: 2 additions & 2 deletions .github/workflows/metadata-ingestion.yml
@@ -94,10 +94,10 @@ jobs:
!**/binary/**
- name: Upload coverage to Codecov
if: ${{ always() && matrix.python-version == '3.10' }}
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: .
directory: ./build/coverage-reports/
fail_ci_if_error: false
flags: pytest-${{ matrix.command }}
name: pytest-${{ matrix.command }}
9 changes: 9 additions & 0 deletions .github/workflows/metadata-io.yml
@@ -81,6 +81,15 @@ jobs:
!**/binary/**
- name: Ensure codegen is updated
uses: ./.github/actions/ensure-codegen-updated
- name: Upload coverage to Codecov
if: ${{ always()}}
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./build/coverage-reports/
fail_ci_if_error: false
name: metadata-io-test
verbose: true

event-file:
runs-on: ubuntu-latest
4 changes: 2 additions & 2 deletions .github/workflows/prefect-plugin.yml
@@ -67,10 +67,10 @@ jobs:
!**/binary/**
- name: Upload coverage to Codecov
if: always()
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: .
directory: ./build/coverage-reports/
fail_ci_if_error: false
flags: prefect,prefect-${{ matrix.extra_pip_extras }}
name: pytest-prefect-${{ matrix.python-version }}
2 changes: 1 addition & 1 deletion build.gradle
@@ -57,7 +57,7 @@ buildscript {
ext.hazelcastVersion = '5.3.6'
ext.ebeanVersion = '15.5.2'
ext.googleJavaFormatVersion = '1.18.1'
ext.openLineageVersion = '1.24.2'
ext.openLineageVersion = '1.25.0'
ext.logbackClassicJava8 = '1.2.12'

ext.docker_registry = 'acryldata'
10 changes: 9 additions & 1 deletion datahub-frontend/build.gradle
@@ -4,8 +4,9 @@ plugins {
id 'org.gradle.playframework'
}

apply from: "../gradle/versioning/versioning.gradle"
apply from: '../gradle/versioning/versioning.gradle'
apply from: './play.gradle'
apply from: '../gradle/coverage/java-coverage.gradle'

ext {
docker_repo = 'datahub-frontend-react'
@@ -18,6 +19,13 @@ java {
}
}

test {
jacoco {
// jacoco instrumentation is failing when dealing with code of this dependency, excluding it.
excludes = ["com/gargoylesoftware/**"]
}
}

model {
// Must specify the dependency here as "stage" is added by rule based model.
tasks.myTar {
@@ -67,6 +67,7 @@
import com.linkedin.datahub.graphql.generated.EntityPath;
import com.linkedin.datahub.graphql.generated.EntityRelationship;
import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy;
import com.linkedin.datahub.graphql.generated.FacetMetadata;
import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint;
import com.linkedin.datahub.graphql.generated.FormActorAssignment;
import com.linkedin.datahub.graphql.generated.FreshnessContract;
@@ -1474,6 +1475,19 @@ private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder
"entity",
new EntityTypeResolver(
entityTypes, (env) -> ((BrowsePathEntry) env.getSource()).getEntity())))
.type(
"FacetMetadata",
typeWiring ->
typeWiring.dataFetcher(
"entity",
new EntityTypeResolver(
entityTypes,
(env) -> {
FacetMetadata facetMetadata = env.getSource();
return facetMetadata.getEntity() != null
? facetMetadata.getEntity()
: null;
})))
.type(
"LineageRelationship",
typeWiring ->
@@ -339,6 +339,11 @@ public static boolean canManageStructuredProperties(@Nonnull QueryContext contex
context.getOperationContext(), PoliciesConfig.MANAGE_STRUCTURED_PROPERTIES_PRIVILEGE);
}

public static boolean canViewStructuredPropertiesPage(@Nonnull QueryContext context) {
return AuthUtil.isAuthorized(
context.getOperationContext(), PoliciesConfig.VIEW_STRUCTURED_PROPERTIES_PAGE_PRIVILEGE);
}

public static boolean canManageForms(@Nonnull QueryContext context) {
return AuthUtil.isAuthorized(
context.getOperationContext(), PoliciesConfig.MANAGE_DOCUMENTATION_FORMS_PRIVILEGE);
@@ -93,6 +93,10 @@ public CompletableFuture<AuthenticatedUser> get(DataFetchingEnvironment environm
BusinessAttributeAuthorizationUtils.canCreateBusinessAttribute(context));
platformPrivileges.setManageBusinessAttributes(
BusinessAttributeAuthorizationUtils.canManageBusinessAttribute(context));
platformPrivileges.setManageStructuredProperties(
AuthorizationUtils.canManageStructuredProperties(context));
platformPrivileges.setViewStructuredPropertiesPage(
AuthorizationUtils.canViewStructuredPropertiesPage(context));
// Construct and return authenticated user object.
final AuthenticatedUser authUser = new AuthenticatedUser();
authUser.setCorpUser(corpUser);
@@ -188,6 +188,7 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen
.setDataContractsEnabled(_featureFlags.isDataContractsEnabled())
.setEditableDatasetNameEnabled(_featureFlags.isEditableDatasetNameEnabled())
.setShowSeparateSiblings(_featureFlags.isShowSeparateSiblings())
.setShowManageStructuredProperties(_featureFlags.isShowManageStructuredProperties())
.build();

appConfig.setFeatureFlags(featureFlagsConfig);
@@ -66,11 +66,17 @@ public CompletableFuture<AggregateResults> get(DataFetchingEnvironment environme

final Filter inputFilter = ResolverUtils.buildFilter(null, input.getOrFilters());

final SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags());
final SearchFlags searchFlags =
input.getSearchFlags() != null
? mapInputFlags(context, input.getSearchFlags())
: new SearchFlags();

final List<String> facets =
input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null;

// do not include default facets if we're requesting any facets specifically
searchFlags.setIncludeDefaultFacets(facets == null || facets.size() <= 0);

List<String> finalEntities =
maybeResolvedView != null
? SearchUtils.intersectEntityTypes(
@@ -2,19 +2,28 @@

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.getEntityNames;

import com.google.common.collect.ImmutableList;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils;
import com.linkedin.datahub.graphql.generated.EntityType;
import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput;
import com.linkedin.datahub.graphql.generated.SearchResults;
import com.linkedin.datahub.graphql.resolvers.ResolverUtils;
import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.query.SearchFlags;
import com.linkedin.metadata.query.filter.Condition;
import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
import com.linkedin.metadata.query.filter.CriterionArray;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.query.filter.SortCriterion;
import com.linkedin.metadata.search.SearchResult;
import com.linkedin.metadata.service.ViewService;
import com.linkedin.metadata.utils.CriterionUtils;
import com.linkedin.view.DataHubViewInfo;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
@@ -64,24 +73,7 @@ public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment)
ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters());

SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags());
List<SortCriterion> sortCriteria;
if (input.getSortInput() != null) {
if (input.getSortInput().getSortCriteria() != null) {
sortCriteria =
input.getSortInput().getSortCriteria().stream()
.map(SearchUtils::mapSortCriterion)
.collect(Collectors.toList());
} else {
sortCriteria =
input.getSortInput().getSortCriterion() != null
? Collections.singletonList(
mapSortCriterion(input.getSortInput().getSortCriterion()))
: Collections.emptyList();
}

} else {
sortCriteria = Collections.emptyList();
}
List<SortCriterion> sortCriteria = SearchUtils.getSortCriteria(input.getSortInput());

try {
log.debug(
@@ -101,6 +93,14 @@ public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment)
return SearchUtils.createEmptySearchResults(start, count);
}

boolean shouldIncludeStructuredPropertyFacets =
input.getSearchFlags() != null
&& input.getSearchFlags().getIncludeStructuredPropertyFacets() != null
? input.getSearchFlags().getIncludeStructuredPropertyFacets()
: false;
List<String> structuredPropertyFacets =
shouldIncludeStructuredPropertyFacets ? getStructuredPropertyFacets(context) : null;

return UrnSearchResultsMapper.map(
context,
_entityClient.searchAcrossEntities(
@@ -113,7 +113,8 @@
: baseFilter,
start,
count,
sortCriteria));
sortCriteria,
structuredPropertyFacets));
} catch (Exception e) {
log.error(
"Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}",
@@ -133,4 +134,45 @@ public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment)
this.getClass().getSimpleName(),
"get");
}

private List<String> getStructuredPropertyFacets(final QueryContext context) {
try {
SearchFlags searchFlags = new SearchFlags().setSkipCache(true);
SearchResult result =
_entityClient.searchAcrossEntities(
context.getOperationContext().withSearchFlags(flags -> searchFlags),
getEntityNames(ImmutableList.of(EntityType.STRUCTURED_PROPERTY)),
"*",
createStructuredPropertyFilter(),
0,
100,
Collections.emptyList(),
null);
return result.getEntities().stream()
.map(entity -> String.format("structuredProperties.%s", entity.getEntity().getId()))
.collect(Collectors.toList());
} catch (Exception e) {
log.error("Failed to get structured property facets to filter on", e);
return Collections.emptyList();
}
}

private Filter createStructuredPropertyFilter() {
return new Filter()
.setOr(
new ConjunctiveCriterionArray(
ImmutableList.of(
new ConjunctiveCriterion()
.setAnd(
new CriterionArray(
ImmutableList.of(
CriterionUtils.buildCriterion(
"filterStatus", Condition.EQUAL, "ENABLED")))),
new ConjunctiveCriterion()
.setAnd(
new CriterionArray(
ImmutableList.of(
CriterionUtils.buildCriterion(
"showInSearchFilters", Condition.EQUAL, "true")))))));
}
}
[Diffs for the remaining changed files are not shown.]
