v4.0.3 Release
khustup2 committed Nov 17, 2024
1 parent d6a60a7 commit 4e94d0d
Showing 2 changed files with 81 additions and 4 deletions.
3 changes: 2 additions & 1 deletion python/deeplake/__init__.py
@@ -6,7 +6,7 @@
 import deeplake
 from ._deeplake import *
 
-__version__ = "4.0.2"
+__version__ = "4.0.3"
 
 __all__ = [
     "__version__",
@@ -34,6 +34,7 @@
     "ColumnAlreadyExistsError",
     "ColumnDoesNotExistError",
     "InvalidColumnValueError",
+    "PushError",
     "GcsStorageProviderFailed",
     "History",
     "InvalidType",
82 changes: 79 additions & 3 deletions python/deeplake/__init__.pyi
@@ -32,6 +32,7 @@ __all__ = [
     "ColumnAlreadyExistsError",
     "ColumnDoesNotExistError",
     "InvalidColumnValueError",
+    "PushError",
     "GcsStorageProviderFailed",
     "History",
     "InvalidType",
@@ -380,6 +381,9 @@ class TagExistsError(Exception):
 class CannotTagUncommittedDatasetError(Exception):
     pass
 
+class PushError(Exception):
+    pass
+
 class Tags:
     """
     Provides access to the tags within a dataset.
@@ -1155,7 +1159,7 @@ class Dataset(DatasetView):
         """Restores dataset from a pickled state.
 
         Args:
-            state (dict): The pickled state used to restore the dataset.
+            arg0 (dict): The pickled state used to restore the dataset.
         """
 
     def add_column(
@@ -1331,6 +1335,57 @@ class Dataset(DatasetView):
         Asynchronously reverts any in-progress changes to the dataset you have made. Does not revert any changes that have been committed.
         """
 
+    def push(self, url: str, creds: dict[str, str] | None = None, token: str | None = None) -> None:
+        """
+        Pushes any new history from this dataset to the dataset at the given url.
+        Similar to [deeplake.Dataset.pull][] but in the other direction.
+
+        Parameters:
+            url: The URL of the destination dataset
+            creds: Optional credentials needed to connect to the dataset
+            token: Optional deeplake token
+        """
+        ...
+
+    def push_async(self, url: str, creds: dict[str, str] | None = None, token: str | None = None) -> FutureVoid:
+        """
+        Asynchronously pushes any new history from this dataset to the dataset at the given url.
+        Similar to [deeplake.Dataset.pull_async][] but in the other direction.
+
+        Parameters:
+            url: The URL of the destination dataset
+            creds: Optional credentials needed to connect to the dataset
+            token: Optional deeplake token
+        """
+        ...
+
+    def pull(self, url: str, creds: dict[str, str] | None = None, token: str | None = None) -> None:
+        """
+        Pulls any new history from the dataset at the given url into this dataset.
+        Similar to [deeplake.Dataset.push][] but in the other direction.
+
+        Parameters:
+            url: The URL of the source dataset
+            creds: Optional credentials needed to connect to the dataset
+            token: Optional deeplake token
+        """
+        ...
+
+    def pull_async(self, url: str, creds: dict[str, str] | None = None, token: str | None = None) -> FutureVoid:
+        """
+        Asynchronously pulls any new history from the dataset at the given url into this dataset.
+        Similar to [deeplake.Dataset.push_async][] but in the other direction.
+
+        Parameters:
+            url: The URL of the source dataset
+            creds: Optional credentials needed to connect to the dataset
+            token: Optional deeplake token
+        """
+        ...
+
     @property
     def history(self) -> History:
         """
@@ -1406,7 +1461,7 @@ class ReadOnlyDataset(DatasetView):
     @property
     def tags(self) -> TagsView:
         """
-        The collection of [deeplake.TagsView][] within the dataset
+        The collection of [deeplake.TagView][]s within the dataset
         """
         ...
@@ -1474,6 +1529,27 @@
         """
         ...
 
+    def push(self, url: str, creds: dict[str, str] | None = None, token: str | None = None) -> None:
+        """
+        Pushes any history from this dataset to the dataset at the given url.
+
+        Parameters:
+            url: The URL of the destination dataset
+            creds: Optional credentials needed to connect to the dataset
+            token: Optional deeplake token
+        """
+        ...
+
+    def push_async(self, url: str, creds: dict[str, str] | None = None, token: str | None = None) -> FutureVoid:
+        """
+        Asynchronously pushes any history from this dataset to the dataset at the given url.
+
+        Parameters:
+            url: The URL of the destination dataset
+            creds: Optional credentials needed to connect to the dataset
+            token: Optional deeplake token
+        """
+        ...
+
     def __getstate__(self) -> tuple:
         """Returns a dict that can be pickled and used to restore this dataset.
@@ -1803,7 +1879,7 @@ def create(url: str, creds: dict[str, str] | None = None, token: str | None = None
             - To use credentials managed in your Activeloop organization, use the key 'creds_key': 'managed_key_name'. This requires the org_id dataset argument to be set.
             - If nothing is given, credentials are fetched from the environment variables. This is also the case when creds is not passed for cloud datasets.
         token (str, optional): Activeloop token, used for fetching credentials to the dataset at path if it is a Deep Lake dataset. This is optional; tokens are normally autogenerated.
-        schema (dict): The initial schema to use for the dataset. See `deeplake.schemas` such as [deeplake.schemas.TextDocuments][] for common starting schemas.
+        schema (dict): The initial schema to use for the dataset. See `deeplake.schemas` such as [deeplake.schemas.TextEmbeddings][] for common starting schemas.
 
     Examples:
         >>> import deeplake
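A quick usage sketch of the new [deeplake.Dataset.push][]/[deeplake.Dataset.pull][] API added above, in the doctest style the stub already uses. The paths are hypothetical, and `deeplake.open` is assumed to be available as a way to get a writable `Dataset`; a failed push presumably raises the new [deeplake.PushError][]:

>>> import deeplake
>>> ds = deeplake.open("file:///tmp/my_dataset")  # hypothetical local copy
>>> ds.pull("s3://my-bucket/my_dataset")  # fetch commits the remote has that we do not
>>> ds.push("s3://my-bucket/my_dataset")  # send our new commits back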
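The `*_async` variants return a `FutureVoid`. A minimal sketch, assuming `FutureVoid` exposes a blocking `wait()` and that `deeplake.open_read_only` yields the `ReadOnlyDataset` shown above (paths again hypothetical):

>>> ro = deeplake.open_read_only("s3://my-bucket/my_dataset")
>>> ro.push_async("gcs://my-bucket/replica").wait()  # copy this dataset's history elsewhere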
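And since the corrected docstring now points at [deeplake.schemas.TextEmbeddings][], a sketch of passing a starting schema to `deeplake.create`; the call `TextEmbeddings(768)` (an embedding size) is an assumption made for illustration:

>>> import deeplake
>>> ds = deeplake.create("file:///tmp/text_ds", schema=deeplake.schemas.TextEmbeddings(768))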
