Skip to content

Commit

Permalink
refactor(ingest/s3): use groupby_unsorted (#12412)
Browse files Browse the repository at this point in the history
  • Loading branch information
eagle-25 authored Jan 22, 2025
1 parent 0283a8c commit 3668dfc
Show file tree
Hide file tree
Showing 3 changed files with 7 additions and 55 deletions.
25 changes: 1 addition & 24 deletions metadata-ingestion/src/datahub/ingestion/source/aws/s3_util.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,6 @@
import logging
import os
from collections import defaultdict
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional

if TYPE_CHECKING:
from mypy_boto3_s3.service_resource import ObjectSummary

from typing import Optional

S3_PREFIXES = ["s3://", "s3n://", "s3a://"]

Expand Down Expand Up @@ -73,21 +68,3 @@ def get_key_prefix(s3_uri: str) -> str:
f"Not an S3 URI. Must start with one of the following prefixes: {str(S3_PREFIXES)}"
)
return strip_s3_prefix(s3_uri).split("/", maxsplit=1)[1]


def group_s3_objects_by_dirname(
    s3_objects: Iterable["ObjectSummary"],
) -> Dict[str, List["ObjectSummary"]]:
    """Group S3 objects by the directory portion of their key.

    Objects that live at the bucket root (keys with no "/", e.g.
    s3://bucket/file.txt) are collected under the "/" group. The
    returned mapping preserves defaultdict semantics: looking up an
    unknown directory yields an empty list.
    """
    groups: Dict[str, List["ObjectSummary"]] = defaultdict(list)
    for s3_obj in s3_objects:
        # rpartition splits on the last "/"; an empty separator means
        # the key has no directory component at all.
        head, sep, _basename = s3_obj.key.rpartition("/")
        groups[head if sep else "/"].append(s3_obj)
    return groups
8 changes: 6 additions & 2 deletions metadata-ingestion/src/datahub/ingestion/source/s3/source.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@
get_bucket_name,
get_bucket_relative_path,
get_key_prefix,
group_s3_objects_by_dirname,
strip_s3_prefix,
)
from datahub.ingestion.source.data_lake_common.data_lake_utils import ContainerWUCreator
Expand Down Expand Up @@ -73,6 +72,7 @@
_Aspect,
)
from datahub.telemetry import stats, telemetry
from datahub.utilities.groupby import groupby_unsorted
from datahub.utilities.perf_timer import PerfTimer

if TYPE_CHECKING:
Expand Down Expand Up @@ -868,7 +868,11 @@ def get_folder_info(
"""
partitions: List[Folder] = []
s3_objects = bucket.objects.filter(Prefix=prefix).page_size(PAGE_SIZE)
for key, group in group_s3_objects_by_dirname(s3_objects).items():
grouped_s3_objects_by_dirname = groupby_unsorted(
s3_objects,
key=lambda obj: obj.key.rsplit("/", 1)[0],
)
for key, group in grouped_s3_objects_by_dirname:
file_size = 0
creation_time = None
modification_time = None
Expand Down
29 changes: 0 additions & 29 deletions metadata-ingestion/tests/unit/s3/test_s3_util.py

This file was deleted.

0 comments on commit 3668dfc

Please sign in to comment.