Completed D400 for multiple folders (apache#27722)
bdsoha authored Nov 16, 2022
1 parent c609477 commit 7cfa1be
Showing 11 changed files with 42 additions and 21 deletions.
3 changes: 3 additions & 0 deletions airflow/datasets/manager.py
@@ -46,6 +46,8 @@ def register_dataset_change(
self, *, task_instance: TaskInstance, dataset: Dataset, extra=None, session: Session, **kwargs
) -> None:
"""
+ Register dataset related changes.
+
For local datasets, look them up, record the dataset event, queue dagruns, and broadcast
the dataset event
"""
@@ -107,6 +109,7 @@ def _postgres_queue_dagruns(self, dataset: DatasetModel, session: Session) -> No


def resolve_dataset_manager() -> DatasetManager:
"""Retrieve the dataset manager."""
_dataset_manager_class = conf.getimport(
section="core",
key="dataset_manager_class",
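The hunk above touches DatasetManager.register_dataset_change and resolve_dataset_manager(), which loads whatever class [core] dataset_manager_class points at. A minimal sketch of plugging in a custom manager, assuming a hypothetical my_company.datasets module importable by the scheduler:

# airflow.cfg (hypothetical):
#   [core]
#   dataset_manager_class = my_company.datasets.AuditingDatasetManager
from airflow.datasets.manager import DatasetManager


class AuditingDatasetManager(DatasetManager):
    """Logs every dataset event before delegating to the stock behaviour."""

    def register_dataset_change(self, *, task_instance, dataset, extra=None, session, **kwargs):
        self.log.info("Dataset change from task %s: %s", task_instance.task_id, dataset.uri)
        super().register_dataset_change(
            task_instance=task_instance, dataset=dataset, extra=extra, session=session, **kwargs
        )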
5 changes: 3 additions & 2 deletions airflow/decorators/base.py
@@ -113,8 +113,9 @@ def get_unique_task_id(
task_group: TaskGroup | None = None,
) -> str:
"""
- Generate unique task id given a DAG (or if run in a DAG context)
- Ids are generated by appending a unique number to the end of
+ Generate unique task id given a DAG (or if run in a DAG context).
+
+ IDs are generated by appending a unique number to the end of
the original task id.
Example:
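The docstring above only states the numbering rule, so here is a small stand-alone illustration of that behaviour (not the actual Airflow implementation):

def illustrate_unique_task_id(base: str, existing_ids: set) -> str:
    # Mirrors the rule described above: reuse the id if it is free, otherwise
    # append __1, __2, ... until a unique id is found.
    if base not in existing_ids:
        return base
    suffix = 1
    while f"{base}__{suffix}" in existing_ids:
        suffix += 1
    return f"{base}__{suffix}"


assert illustrate_unique_task_id("extract", {"extract"}) == "extract__1"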
3 changes: 2 additions & 1 deletion airflow/decorators/branch_python.py
@@ -70,7 +70,8 @@ def branch_task(
python_callable: Callable | None = None, multiple_outputs: bool | None = None, **kwargs
) -> TaskDecorator:
"""
- Wraps a python function into a BranchPythonOperator
+ Wraps a python function into a BranchPythonOperator.
+
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:BranchPythonOperator`
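For orientation, a minimal usage sketch of the decorator documented above; the task ids are illustrative:

from airflow.decorators import task


@task.branch(task_id="pick_path")
def pick_path(records_found: bool) -> str:
    # Return the task_id of the downstream branch that should run.
    return "process_records" if records_found else "skip_processing"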
10 changes: 6 additions & 4 deletions airflow/exceptions.py
@@ -17,7 +17,7 @@
# under the License.
# Note: Any AirflowException raised is expected to cause the TaskInstance
# to be marked in an ERROR state
"""Exceptions used by Airflow"""
"""Exceptions used by Airflow."""
from __future__ import annotations

import datetime
@@ -84,7 +84,7 @@ class AirflowWebServerTimeout(AirflowException):


class AirflowSkipException(AirflowException):
"""Raise when the task should be skipped"""
"""Raise when the task should be skipped."""


class AirflowFailException(AirflowException):
@@ -229,7 +229,7 @@ class SerializationError(AirflowException):


class ParamValidationError(AirflowException):
"""Raise when DAG params is invalid"""
"""Raise when DAG params is invalid."""


class TaskNotFound(AirflowNotFoundException):
@@ -317,6 +317,8 @@ class ConnectionNotUnique(AirflowException):

class TaskDeferred(BaseException):
"""
+ Signal an operator moving to deferred state.
+
Special exception raised to signal that the operator it was raised from
wishes to defer until a trigger fires.
"""
@@ -347,7 +349,7 @@ class TaskDeferralError(AirflowException):


class PodMutationHookException(AirflowException):
"""Raised when exception happens during Pod Mutation Hook execution"""
"""Raised when exception happens during Pod Mutation Hook execution."""


class PodReconciliationError(AirflowException):
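As a brief, hedged illustration of how the exceptions touched above are typically raised from task code:

from airflow.exceptions import AirflowFailException, AirflowSkipException


def process(records):
    if not records:
        # Marks the task instance as skipped rather than failed.
        raise AirflowSkipException("Nothing to do for this data interval")
    if any(r.get("corrupt") for r in records):
        # Fails the task immediately; retries will not be attempted.
        raise AirflowFailException("Corrupt input, retries will not help")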
8 changes: 6 additions & 2 deletions airflow/hooks/base.py
@@ -15,7 +15,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Base class for all hooks"""
"""Base class for all hooks."""
from __future__ import annotations

import logging
@@ -34,7 +34,9 @@

class BaseHook(LoggingMixin):
"""
- Abstract base class for hooks, hooks are meant as an interface to
+ Abstract base class for hooks.
+
+ Hooks are meant as an interface to
interact with external systems. MySqlHook, HiveHook, PigHook return
object that can handle the connection and interaction to specific
instances of these systems, and expose consistent methods to interact
@@ -160,6 +162,8 @@ def get_connection_form_widgets() -> dict[str, Any]:
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
"""
+ Attributes of the UI field.
+
Returns dictionary describing customizations to implement in javascript handling the
connection form. Should be compliant with airflow/customized_form_field_behaviours.schema.json'
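A hedged sketch of a custom hook implementing the UI customization hook described above; the connection type and field names are invented for illustration:

from typing import Any

from airflow.hooks.base import BaseHook


class MyServiceHook(BaseHook):
    conn_name_attr = "my_service_conn_id"
    default_conn_name = "my_service_default"
    conn_type = "my_service"
    hook_name = "My Service"

    @staticmethod
    def get_ui_field_behaviour() -> dict[str, Any]:
        # Shape follows airflow/customized_form_field_behaviours.schema.json.
        return {
            "hidden_fields": ["schema", "extra"],
            "relabeling": {"login": "API user", "password": "API token"},
            "placeholders": {"host": "https://api.example.com"},
        }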
2 changes: 1 addition & 1 deletion airflow/hooks/subprocess.py
@@ -29,7 +29,7 @@


class SubprocessHook(BaseHook):
"""Hook for running processes with the ``subprocess`` module"""
"""Hook for running processes with the ``subprocess`` module."""

def __init__(self) -> None:
self.sub_process: Popen[bytes] | None = None
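A minimal usage sketch for the hook above; treat the result attribute names as indicative rather than definitive:

from airflow.hooks.subprocess import SubprocessHook

hook = SubprocessHook()
result = hook.run_command(command=["bash", "-c", "echo hello"])
print(result.exit_code, result.output)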
10 changes: 5 additions & 5 deletions airflow/macros/__init__.py
@@ -31,7 +31,7 @@

def ds_add(ds: str, days: int) -> str:
"""
- Add or subtract days from a YYYY-MM-DD
+ Add or subtract days from a YYYY-MM-DD.
:param ds: anchor date in ``YYYY-MM-DD`` format to add to
:param days: number of days to add to the ds, you can use negative values
@@ -49,8 +49,7 @@ def ds_add(ds: str, days: int) -> str:

def ds_format(ds: str, input_format: str, output_format: str) -> str:
"""
- Takes an input string and outputs another string
- as specified in the output format
+ Output datetime string in a given format.
:param ds: input string which contains a date
:param input_format: input string format. E.g. %Y-%m-%d
@@ -66,8 +65,9 @@ def ds_format(ds: str, input_format: str, output_format: str) -> str:

def datetime_diff_for_humans(dt: Any, since: DateTime | None = None) -> str:
"""
- Return a human-readable/approximate difference between two datetimes, or
- one and now.
+ Return a human-readable/approximate difference between datetimes.
+
+ When only one datetime is provided, the comparison will be based on now.
:param dt: The datetime to display the diff for
:param since: When to display the date from. If ``None`` then the diff is
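Expected behaviour of the macros documented above, as they are typically used inside templated fields (values are illustrative):

from airflow.macros import ds_add, ds_format

ds_add("2022-11-16", 5)    # "2022-11-21"
ds_add("2022-11-16", -5)   # "2022-11-11"
ds_format("2022-11-16", "%Y-%m-%d", "%d-%m-%Y")    # "16-11-2022"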
9 changes: 6 additions & 3 deletions airflow/security/kerberos.py
@@ -32,7 +32,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Kerberos security provider"""
"""Kerberos security provider."""
import logging
import shlex
import subprocess
@@ -49,7 +49,7 @@

def renew_from_kt(principal: str | None, keytab: str, exit_on_fail: bool = True):
"""
- Renew kerberos token from keytab
+ Renew kerberos token from keytab.
:param principal: principal
:param keytab: keytab file
@@ -159,7 +159,10 @@ def perform_krb181_workaround(principal: str):


def detect_conf_var() -> bool:
"""Return true if the ticket cache contains "conf" information as is found
"""
Autodetect the Kerberos ticket configuration.
Return true if the ticket cache contains "conf" information as is found
in ticket caches of Kerberos 1.8.1 or later. This is incompatible with the
Sun Java Krb5LoginModule in Java6, so we need to take an action to work
around it.
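For orientation, a hedged sketch of calling the renewal helper directly, mirroring what the airflow kerberos daemon does in a loop; the principal and keytab path are placeholders:

from airflow.security.kerberos import renew_from_kt

renew_from_kt(
    principal="airflow/worker1@EXAMPLE.COM",          # placeholder principal
    keytab="/etc/security/keytabs/airflow.keytab",    # placeholder keytab path
    exit_on_fail=False,
)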
5 changes: 3 additions & 2 deletions airflow/timetables/events.py
@@ -27,8 +27,9 @@

class EventsTimetable(Timetable):
"""
- Timetable that schedules DAG runs at specific listed datetimes. Suitable for
- predictable but truly irregular scheduling such as sporting events.
+ Timetable that schedules DAG runs at specific listed datetimes.
+
+ Suitable for predictable but truly irregular scheduling such as sporting events.
:param event_dates: List of datetimes for the DAG to run at. Duplicates will be ignored. Must be finite
and of reasonable size as it will be loaded in its entirety.
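A short, hedged sketch of the timetable described above attached to a DAG; the dag_id and event dates are illustrative:

import pendulum

from airflow import DAG
from airflow.timetables.events import EventsTimetable

with DAG(
    dag_id="match_day_pipeline",
    start_date=pendulum.datetime(2022, 1, 1, tz="UTC"),
    catchup=False,
    timetable=EventsTimetable(
        event_dates=[
            pendulum.datetime(2022, 11, 20, 8, tz="UTC"),
            pendulum.datetime(2022, 12, 4, 8, tz="UTC"),
        ],
    ),
):
    pass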
4 changes: 3 additions & 1 deletion airflow/triggers/base.py
@@ -70,7 +70,9 @@ async def run(self) -> AsyncIterator["TriggerEvent"]:

def cleanup(self) -> None:
"""
- Called when the trigger is no longer needed and it's being removed
+ Cleanup the trigger.
+
+ Called when the trigger is no longer needed, and it's being removed
from the active triggerer process.
"""

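To show where cleanup() sits in a trigger's lifecycle, a minimal custom-trigger sketch; the polling logic and module path are made up:

import asyncio
import os

from airflow.triggers.base import BaseTrigger, TriggerEvent


class FileCreatedTrigger(BaseTrigger):
    def __init__(self, path: str):
        super().__init__()
        self.path = path

    def serialize(self):
        # (classpath, kwargs) so the triggerer can re-instantiate the trigger.
        return ("my_plugin.triggers.FileCreatedTrigger", {"path": self.path})

    async def run(self):
        # Poll until the file appears, then fire a single event.
        while not os.path.exists(self.path):
            await asyncio.sleep(30)
        yield TriggerEvent(self.path)

    def cleanup(self) -> None:
        # Called when the trigger is removed from the triggerer; nothing to release here.
        pass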
4 changes: 4 additions & 0 deletions airflow/triggers/temporal.py
@@ -26,6 +26,8 @@

class DateTimeTrigger(BaseTrigger):
"""
+ Trigger based on a datetime.
+
A trigger that fires exactly once, at the given datetime, give or take
a few seconds.
@@ -68,6 +70,8 @@ async def run(self):

class TimeDeltaTrigger(DateTimeTrigger):
"""
+ Create DateTimeTriggers based on delays.
+
Subclass to create DateTimeTriggers based on time delays rather
than exact moments.
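Finally, a hedged sketch of deferring onto TimeDeltaTrigger from an operator, which is also where the TaskDeferred signal from exceptions.py comes into play (defer() raises it internally); class and method names other than defer() are illustrative:

from datetime import timedelta

from airflow.models.baseoperator import BaseOperator
from airflow.triggers.temporal import TimeDeltaTrigger


class WaitTenMinutesOperator(BaseOperator):
    def execute(self, context):
        # Free the worker slot; the triggerer resumes us roughly ten minutes later.
        self.defer(trigger=TimeDeltaTrigger(timedelta(minutes=10)), method_name="resume")

    def resume(self, context, event=None):
        self.log.info("Resumed after deferral")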
