Skip to content

Commit

Permalink
cleaned the files
Browse files Browse the repository at this point in the history
  • Loading branch information
Seher Karakuzu authored and Seher Karakuzu committed Mar 8, 2024
1 parent 2616a04 commit 5219576
Show file tree
Hide file tree
Showing 4 changed files with 0 additions and 19 deletions.
3 changes: 0 additions & 3 deletions tiled/adapters/csv.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,9 +68,6 @@ def __init__(
specs=None,
access_policy=None,
):
#if not isinstance(data_uris, list):
# data_uris = [data_uris]

# TODO Store data_uris instead and generalize to non-file schemes.
self._partition_paths = [path_from_uri(uri) for uri in data_uris]
self._metadata = metadata or {}
Expand Down
1 change: 0 additions & 1 deletion tiled/catalog/adapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -915,7 +915,6 @@ async def keys_range(self, offset, limit):

async def items_range(self, offset, limit):
if self.data_sources:
print("HEREEEE TYPE OF GET ADAPTOR", self.get_adapter(), type(self.get_adapter()))
return it.islice(
(await self.get_adapter()).items(),
offset,
Expand Down
11 changes: 0 additions & 11 deletions tiled/client/container.py
Original file line number Diff line number Diff line change
Expand Up @@ -594,17 +594,6 @@ def new(
write_dataframe
write_coo_array
"""
#print("WHAT THE DATASOURCE", type(data_sources), data_sources)


# There is some issue with the type of data_sources. In the tests it came out as a tuple of lists
#if isinstance(data_sources, tuple):
# print("TYPE OF DATASOURCES", type(data_sources))
# data_sources = data_sources[0]

#for data_source in data_sources:
# print("WHAT THE DATASOURCE", type(data_source), data_source.assets)

self._cached_len = None
metadata = metadata or {}
specs = specs or []
Expand Down
4 changes: 0 additions & 4 deletions tiled/client/register.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,7 +280,6 @@ async def register_single_item(
mimetype = resolve_mimetype(
item, settings.mimetypes_by_file_ext, settings.mimetype_detection_hook
)
print("EVER HEREEEEEEEEEEEEEE", f"{mimetype=}")
if mimetype is None:
unhandled_items.append(item)
if not is_directory:
Expand All @@ -304,11 +303,8 @@ async def register_single_item(
key = settings.key_from_filename(item.name)
if hasattr(adapter, "generate_data_sources"):
# Let the Adapter describe the DataSource(s).
print("XXXXXXXXXXXXXXXbefore ")
data_sources = adapter.generate_data_sources(mimetype, dict_or_none, item, is_directory)
print("XXXXXXXXXXXXXXXXXafter ")
else:
print(" XXXXXXXXXXXXXXXxin the else")
# Back-compat: Assume one Asset passed as a
# parameter named 'data_uri'.
data_sources = [
Expand Down

0 comments on commit 5219576

Please sign in to comment.