fixing documentation errors (#405)
* fixing documentation errors

* added flag to treat warnings as errors

* fixed documentation errors

* Made autodoc generation for dataclasses respect order of arguments
Fl4m3Ph03n1x authored Apr 29, 2022
1 parent 17f41b9 commit 13efb48
Showing 24 changed files with 273 additions and 261 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/document.yml
@@ -39,7 +39,7 @@ jobs:
run: |
rm -rf docs/*
sphinx-apidoc -f -o source darwin
- sphinx-build -b html source/ docs/
+ sphinx-build -b html source/ docs/ -W
- name: Setup access to AWS
uses: aws-actions/configure-aws-credentials@v1
with:
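The -W flag is the "flag to treat warnings as errors" from the commit message: with it, sphinx-build fails the build on any reST warning instead of publishing broken pages, so the docstring fixes in the remaining files are what keep this stricter CI job green.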
2 changes: 2 additions & 0 deletions darwin/dataset/release.py
@@ -104,6 +104,7 @@ def parse_json(cls, dataset_slug: str, team_slug: str, payload: Dict[str, Any])
A Dictionary with the ``Release`` information. It must have a minimal format similar to:
.. code-block:: javascript
{
"version": "a_version",
"name": "a_name"
@@ -118,6 +119,7 @@ def parse_json(cls, dataset_slug: str, team_slug: str, payload: Dict[str, Any])
A more complete format for this parameter would be similar to:
.. code-block:: javascript
{
"version": "a_version",
"name": "a_name",
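Each hunk above adds exactly one line: almost certainly the blank line that reST requires around a .. code-block:: directive (one between the introductory paragraph and the directive, and one between the directive and its indented body). Without that separation docutils mis-parses the block, Sphinx warns, and the new -W flag turns the warning into a failed build. A minimal sketch of the valid shape, using a hypothetical function:

    def parse_json(payload):
        """
        Parses ``payload``, which must have a minimal format similar to:

        .. code-block:: javascript

            {
                "version": "a_version",
                "name": "a_name"
            }
        """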
22 changes: 11 additions & 11 deletions darwin/dataset/remote_dataset.py
@@ -434,60 +434,60 @@ def fetch_remote_files(

def archive(self, items: Iterator[DatasetItem]) -> None:
"""
- Archives (soft-deletion) the given ``DatasetItem``s belonging to this ``RemoteDataset``.
+ Archives (soft-deletion) the given ``DatasetItem``\\s belonging to this ``RemoteDataset``.
Parameters
----------
items : Iterator[DatasetItem]
- The ``DatasetItem``s to be archived.
+ The ``DatasetItem``\\s to be archived.
"""
payload: Dict[str, Any] = {"filter": {"dataset_item_ids": [item.id for item in items]}}
self.client.archive_item(self.slug, self.team, payload)

def restore_archived(self, items: Iterator[DatasetItem]) -> None:
"""
- Restores the archived ``DatasetItem``s that belong to this ``RemoteDataset``.
+ Restores the archived ``DatasetItem``\\s that belong to this ``RemoteDataset``.
Parameters
----------
items : Iterator[DatasetItem]
- The ``DatasetItem``s to be restored.
+ The ``DatasetItem``\\s to be restored.
"""
payload: Dict[str, Any] = {"filter": {"dataset_item_ids": [item.id for item in items]}}
self.client.restore_archived_item(self.slug, self.team, payload)

def move_to_new(self, items: Iterator[DatasetItem]) -> None:
"""
- Changes the given ``DatasetItem``s status to ``new``.
+ Changes the given ``DatasetItem``\\s status to ``new``.
Parameters
----------
items : Iterator[DatasetItem]
- The ``DatasetItem``s whose status will change.
+ The ``DatasetItem``\\s whose status will change.
"""
payload: Dict[str, Any] = {"filter": {"dataset_item_ids": [item.id for item in items]}}
self.client.move_item_to_new(self.slug, self.team, payload)

def reset(self, items: Iterator[DatasetItem]) -> None:
"""
- Resets the given ``DatasetItem``s.
+ Resets the given ``DatasetItem``\\s.
Parameters
----------
items : Iterator[DatasetItem]
- The ``DatasetItem``s to be resetted.
+ The ``DatasetItem``\\s to be resetted.
"""
payload: Dict[str, Any] = {"filter": {"dataset_item_ids": [item.id for item in items]}}
self.client.reset_item(self.slug, self.team, payload)

def delete_items(self, items: Iterator[DatasetItem]) -> None:
"""
- Deletes the given ``DatasetItem``s.
+ Deletes the given ``DatasetItem``\\s.
Parameters
----------
items : Iterator[DatasetItem]
- The ``DatasetItem``s to be deleted.
+ The ``DatasetItem``\\s to be deleted.
"""
payload: Dict[str, Any] = {"filter": {"dataset_item_ids": [item.id for item in items]}}
self.client.delete_item(self.slug, self.team, payload)
@@ -701,7 +701,7 @@ def get_releases(self) -> List["Release"]:
Returns
-------
List["Release"]
- Returns a sorted list of available ``Release``s with the most recent first.
+ Returns a sorted list of available ``Release``\\s with the most recent first.
"""
try:
releases_json: List[Dict[str, Any]] = self.client.get_exports(self.dataset_id, self.team)
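The recurring edit in this file (``DatasetItem``s becoming ``DatasetItem``\\s) works around a reST rule: an inline literal like ``DatasetItem`` may not be followed immediately by a letter, so the bare plural "s" keeps the markup from terminating and Sphinx warns, which is now a hard error under -W. Python collapses the doubled backslash, so the text reST actually sees is ``DatasetItem``\s; the backslash escape lets the literal close while the "s" still renders. A minimal sketch:

    def archive(self, items):
        """
        Archives the given ``DatasetItem``\\s belonging to this ``RemoteDataset``.
        """
        # Sphinx renders the summary line as:
        # Archives the given DatasetItems belonging to this RemoteDataset.

A raw docstring (r"""...""") with a single backslash would behave the same way.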
15 changes: 7 additions & 8 deletions darwin/dataset/split_manager.py
@@ -19,6 +19,7 @@ class Split:
the ``Path`` of the file where that partition split file is going to be stored.
.. code-block:: python
{
"train": Path("/path/to/split/random_train.txt"),
"val": Path("/path/to/split/random_val.txt"),
@@ -31,6 +32,7 @@ class Split:
the ``Path`` of the file where that partition split file is going to be stored.
.. code-block:: python
{
"polygon": {
"train": Path("/path/to/split/stratified_polygon_train.txt"),
@@ -44,17 +46,14 @@ class Split:
}
}
- Attributes
- ----------
- random: Optional[Dict[str, Path]], default: None
-     Stores the type of split (e.g.: ``train``, ``val``, ``test``) and the file path where the
-     split is stored if the split is of type ``random``. Defaults to ``None``.
- stratified: Optional[Dict[str, Dict[str, Path]]], default: None
-     Stores the relation between an annotation type and the partition-filepath key value of the
-     split if its type is ``startified``. Defauls to ``None``.
"""

+ #: Stores the type of split (e.g. ``train``, ``val``, ``test``) and the file path where the
+ #: split is stored if the split is of type ``random``.
random: Optional[Dict[str, Path]] = None

+ #: Stores the relation between an annotation type and the partition-filepath key value of the
+ #: split if its type is ``stratified``.
stratified: Optional[Dict[str, Dict[str, Path]]] = None

def is_valid(self) -> bool:
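This hunk implements the commit message's "Made autodoc generation for dataclasses respect order of arguments": the attribute descriptions move out of the docstring's Attributes section and onto the fields as #: comments. Sphinx autodoc picks up a #: comment placed directly above an assignment as that attribute's documentation, so each description stays attached to its field and, per the commit message, the generated page follows the dataclass's argument order. A minimal sketch of the convention, simplified from the class above:

    from dataclasses import dataclass
    from pathlib import Path
    from typing import Dict, Optional

    @dataclass
    class Split:
        #: Autodoc uses this comment as the documentation for ``random``.
        random: Optional[Dict[str, Path]] = None

        #: And this one for ``stratified``; fields keep their source order.
        stratified: Optional[Dict[str, Dict[str, Path]]] = None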
25 changes: 11 additions & 14 deletions darwin/dataset/upload_manager.py
@@ -83,19 +83,15 @@ class UploadStage(DocEnum):
class UploadRequestError(Exception):
"""
Error throw when uploading a file fails with an unrecoverable error.
- Attributes
- ----------
- file_path : Path
-     The ``Path`` of the file being uploaded.
- stage : UploadStage
-     The ``UploadStage`` when the failure happened.
- error : Optional[Exception], default: None
-     The ``Exception`` that triggered this unrecoverable error.
"""

+ #: The ``Path`` of the file being uploaded.
file_path: Path

+ #: The ``UploadStage`` when the failure happened.
stage: UploadStage

+ #: The ``Exception`` that triggered this unrecoverable error.
error: Optional[Exception] = None


@@ -118,6 +114,7 @@ class LocalFile:
Dictionary with metadata relative to this file. It has the following format:
.. code-block:: python
{
"filename": "a_filename",
"path": "a path"
@@ -208,14 +205,14 @@ def read(self, size: int = -1) -> Any:

class UploadHandler:
"""
- Holds responsabilities for file upload management and failure into ``RemoteDataset``s.
+ Holds responsabilities for file upload management and failure into ``RemoteDataset``\\s.
Parameters
----------
dataset: RemoteDataset
Target ``RemoteDataset`` where we want to upload our files to.
local_files : List[LocalFile]
- List of ``LocalFile``s to be uploaded.
+ List of ``LocalFile``\\s to be uploaded.
Attributes
----------
@@ -224,7 +221,7 @@ class UploadHandler:
errors : List[UploadRequestError]
List of errors that happened during the upload process.
local_files : List[LocalFile]
- List of ``LocalFile``s to be uploaded.
+ List of ``LocalFile``\\s to be uploaded.
blocked_items : List[ItemPayload]
List of items that were not able to be uploaded.
pending_items : List[ItemPayload]
@@ -241,12 +238,12 @@ def __init__(self, dataset: "RemoteDataset", local_files: List[LocalFile]):

@property
def client(self) -> "Client":
"""The ``Client`` used by this ``UploadHander``'s ``RemoteDataset``."""
"""The ``Client`` used by this ``UploadHander``\\'s ``RemoteDataset``."""
return self.dataset.client

@property
def dataset_identifier(self) -> "DatasetIdentifier":
"""The ``DatasetIdentifier`` of this ``UploadHander``'s ``RemoteDataset``."""
"""The ``DatasetIdentifier`` of this ``UploadHander``\\'s ``RemoteDataset``."""
return self.dataset.identifier

@property
1 change: 1 addition & 0 deletions darwin/dataset/utils.py
@@ -239,6 +239,7 @@ def get_coco_format_record(
A coco record with the following keys:
.. code-block:: python
{
"height": 100,
"width": 100,
(The remaining 18 changed files are not shown.)
