Skip to content

Commit

Permalink
Fix spelling (apache#22054)
Browse files Browse the repository at this point in the history
  • Loading branch information
jbampton authored Mar 7, 2022
1 parent 3f18ffc commit 6126c4e
Show file tree
Hide file tree
Showing 3 changed files with 6 additions and 6 deletions.
2 changes: 1 addition & 1 deletion airflow/models/mappedoperator.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ def validate_mapping_kwargs(op: Type["BaseOperator"], func: ValidationSource, va
error = f"{op.__name__}.apply() got an unexpected type {type_name!r} for keyword argument {name}"
raise ValueError(error)
if not unknown_args:
return # If we have no args left ot check: stop looking at the MRO chian.
return # If we have no args left to check: stop looking at the MRO chain.

if len(unknown_args) == 1:
error = f"an unexpected keyword argument {unknown_args.popitem()[0]!r}"
Expand Down
8 changes: 4 additions & 4 deletions airflow/providers/yandex/operators/yandexcloud_dataproc.py
Original file line number Diff line number Diff line change
Expand Up @@ -220,7 +220,7 @@ class DataprocCreateHiveJobOperator(BaseOperator):
:param continue_on_failure: Whether to continue executing queries if a query fails.
:param name: Name of the job. Used for labeling.
:param cluster_id: ID of the cluster to run job in.
Will try to take the ID from Dataproc Hook object if ot specified. (templated)
Will try to take the ID from Dataproc Hook object if it's not specified. (templated)
:param connection_id: ID of the Yandex.Cloud Airflow connection.
"""

Expand Down Expand Up @@ -282,7 +282,7 @@ class DataprocCreateMapReduceJobOperator(BaseOperator):
:param args: Arguments to be passed to the job.
:param name: Name of the job. Used for labeling.
:param cluster_id: ID of the cluster to run job in.
Will try to take the ID from Dataproc Hook object if ot specified. (templated)
Will try to take the ID from Dataproc Hook object if it's not specified. (templated)
:param connection_id: ID of the Yandex.Cloud Airflow connection.
"""

Expand Down Expand Up @@ -349,7 +349,7 @@ class DataprocCreateSparkJobOperator(BaseOperator):
:param args: Arguments to be passed to the job.
:param name: Name of the job. Used for labeling.
:param cluster_id: ID of the cluster to run job in.
Will try to take the ID from Dataproc Hook object if ot specified. (templated)
Will try to take the ID from Dataproc Hook object if it's not specified. (templated)
:param connection_id: ID of the Yandex.Cloud Airflow connection.
:param packages: List of maven coordinates of jars to include on the driver and executor classpaths.
:param repositories: List of additional remote repositories to search for the maven coordinates
Expand Down Expand Up @@ -430,7 +430,7 @@ class DataprocCreatePysparkJobOperator(BaseOperator):
:param args: Arguments to be passed to the job.
:param name: Name of the job. Used for labeling.
:param cluster_id: ID of the cluster to run job in.
Will try to take the ID from Dataproc Hook object if ot specified. (templated)
Will try to take the ID from Dataproc Hook object if it's not specified. (templated)
:param connection_id: ID of the Yandex.Cloud Airflow connection.
:param packages: List of maven coordinates of jars to include on the driver and executor classpaths.
:param repositories: List of additional remote repositories to search for the maven coordinates
Expand Down
2 changes: 1 addition & 1 deletion docs/apache-airflow/howto/set-up-database.rst
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ in the Postgres documentation to learn more.

.. warning::

When you use SQLAlchemy 1.4.0+, you need ot use ``postgresql://`` as the database in the ``sql_alchemy_conn``.
When you use SQLAlchemy 1.4.0+, you need to use ``postgresql://`` as the database in the ``sql_alchemy_conn``.
In the previous versions of SQLAlchemy it was possible to use ``postgres://``, but using it in
SQLAlchemy 1.4.0+ results in:

Expand Down

0 comments on commit 6126c4e

Please sign in to comment.