Skip to content

Commit

Permalink
fixing docstrings
Browse files Browse the repository at this point in the history
  • Loading branch information
skarakuzu committed Jan 15, 2025
1 parent fa53707 commit 31504a4
Showing 1 changed file with 4 additions and 9 deletions.
13 changes: 4 additions & 9 deletions tiled/adapters/sql.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,6 @@ def write(
Parameters
----------
data : data to write into arrow file. Can be a list of record batch, or pandas dataframe.
table_name: string indicating the name of the table to ingest data in the database.
Returns
-------
"""
Expand Down Expand Up @@ -205,8 +204,8 @@ def write_partition(
"Function to write the data as arrow format."
Parameters
----------
partition : the partition index to write.
data : data to write into arrow file. Can be a list of record batch, or pandas dataframe.
table_name: string indicating the name of the table to ingest data in the database.
Returns
-------
"""
Expand Down Expand Up @@ -244,7 +243,7 @@ def append_partition(
Parameters
----------
data : data to append into the database. Can be a list of record batch, or pandas dataframe.
table_name: string indicating the name of the table to ingest data in the database.
partition : the partition index to write.
Returns
-------
"""
Expand Down Expand Up @@ -273,12 +272,11 @@ def append(
data: Union[List[pyarrow.record_batch], pyarrow.record_batch, pandas.DataFrame],
) -> None:
"""
"Function to write the data as arrow format."
"Function to append the data as arrow format."
Parameters
----------
data : data to append into the database. Can be a list of record batch, or pandas dataframe.
table_name: string indicating the name of the table to ingest data in the database.
Returns
-------
"""
Expand All @@ -305,8 +303,6 @@ def read(self, fields: Optional[Union[str, List[str]]] = None) -> pandas.DataFra
The concatenated data from the given set of partitions as a pyarrow table.
Parameters
----------
table_schema: hashed string or list of strings as column names to be hashed.
for example table_schema = ['f0', 'f1', 'f2'] or '3d51c6b180b64bea848f23e5crd91ea3'
fields: optional string to return the data in the specified field.
Returns
-------
Expand All @@ -328,8 +324,7 @@ def read_partition(self, partition: int, fields: Optional[Union[str, List[str]]]
The data from the given partition as a pyarrow table.
Parameters
----------
table_schema: hashed string or list of strings as column names to be hashed.
for example table_schema = ['f0', 'f1', 'f2'] or '3d51c6b180b64bea848f23e5crd91ea3'
partition : the partition index to read.
fields: optional string to return the data in the specified field.
Returns
-------
Expand Down

0 comments on commit 31504a4

Please sign in to comment.