infra: rm unused # noqa violations (langchain-ai#22049)
baskaryan authored May 22, 2024
1 parent 45ed5f3 commit 50186da
Showing 149 changed files with 212 additions and 214 deletions.
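
The `# noqa` comments deleted in this commit are lint suppressions that no longer silence any active violation, so they are dead weight. As a minimal sketch of the difference between a live and an unused directive (assuming a Ruff/flake8-style setup; the commit does not name its tooling, though the rule codes `T201` and `E501` used below appear throughout the diff):

```python
# Sketch: live vs. unused `# noqa` directives, assuming a linter such as
# Ruff or flake8 with T201 ("print found") enabled for this file.
import json


def report(key: str, value: dict) -> None:
    # Live suppression: T201 fires on this line, and the directive
    # silences exactly that violation.
    print(f"{key}={json.dumps(value)}")  # noqa: T201


# Unused suppression: this short line can never violate E501 ("line too
# long"), so the directive suppresses nothing. Ruff flags such comments
# under RUF100 ("unused noqa directive"), and `ruff check --fix` can
# typically remove them automatically.
report("status", {"ok": True})  # noqa: E501
```
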
2 changes: 1 addition & 1 deletion .github/scripts/check_diff.py
@@ -91,4 +91,4 @@
}
for key, value in outputs.items():
json_output = json.dumps(value)
print(f"{key}={json_output}") # noqa: T201
print(f"{key}={json_output}")
2 changes: 1 addition & 1 deletion .github/scripts/get_min_versions.py
@@ -76,4 +76,4 @@ def get_min_version_from_toml(toml_path: str):

print(
" ".join([f"{lib}=={version}" for lib, version in min_versions.items()])
-) # noqa: T201
+)
2 changes: 1 addition & 1 deletion .github/workflows/extract_ignored_words_list.py
@@ -7,4 +7,4 @@
pyproject_toml.get("tool", {}).get("codespell", {}).get("ignore-words-list")
)

print(f"::set-output name=ignore_words_list::{ignore_words_list}") # noqa: T201
print(f"::set-output name=ignore_words_list::{ignore_words_list}")
4 changes: 1 addition & 3 deletions cookbook/rag_with_quantized_embeddings.ipynb
@@ -39,12 +39,10 @@
"from langchain_community.document_loaders.recursive_url_loader import (\n",
" RecursiveUrlLoader,\n",
")\n",
"\n",
"# noqa\n",
"from langchain_community.vectorstores import Chroma\n",
"\n",
"# For our example, we'll load docs from the web\n",
"from langchain_text_splitters import RecursiveCharacterTextSplitter # noqa\n",
"from langchain_text_splitters import RecursiveCharacterTextSplitter\n",
"\n",
"DOCSTORE_DIR = \".\"\n",
"DOCSTORE_ID_KEY = \"doc_id\""
2 changes: 1 addition & 1 deletion docs/api_reference/create_api_rst.py
@@ -187,7 +187,7 @@ def _load_package_modules(
modules_by_namespace[top_namespace] = _module_members

except ImportError as e:
print(f"Error: Unable to import module '{namespace}' with error: {e}") # noqa: T201
print(f"Error: Unable to import module '{namespace}' with error: {e}")

return modules_by_namespace

4 changes: 2 additions & 2 deletions docs/docs/how_to/graph_mapping.ipynb
@@ -300,7 +300,7 @@
"Entities in the question map to the following database values:\n",
"{entities_list}\n",
"Question: {question}\n",
"Cypher query:\"\"\" # noqa: E501\n",
"Cypher query:\"\"\"\n",
"\n",
"cypher_prompt = ChatPromptTemplate.from_messages(\n",
" [\n",
@@ -377,7 +377,7 @@
"response_template = \"\"\"Based on the the question, Cypher query, and Cypher response, write a natural language response:\n",
"Question: {question}\n",
"Cypher query: {query}\n",
"Cypher Response: {response}\"\"\" # noqa: E501\n",
"Cypher Response: {response}\"\"\"\n",
"\n",
"response_prompt = ChatPromptTemplate.from_messages(\n",
" [\n",
2 changes: 1 addition & 1 deletion docs/docs/how_to/sql_csv.ipynb
@@ -503,7 +503,7 @@
}
],
"source": [
"chain = prompt | llm_with_tools | parser | tool # noqa\n",
"chain = prompt | llm_with_tools | parser | tool\n",
"chain.invoke({\"question\": \"What's the correlation between age and fare\"})"
]
},
2 changes: 1 addition & 1 deletion docs/docs/how_to/sql_large_db.ipynb
@@ -262,7 +262,7 @@
" return tables\n",
"\n",
"\n",
"table_chain = category_chain | get_tables # noqa\n",
"table_chain = category_chain | get_tables\n",
"table_chain.invoke({\"input\": \"What are all the genres of Alanis Morisette songs\"})"
]
},
2 changes: 1 addition & 1 deletion docs/docs/integrations/chat/anthropic.ipynb
@@ -670,7 +670,7 @@
" \"type\": \"image_url\",\n",
" \"image_url\": {\n",
" # langchain logo\n",
" \"url\": f\"data:image/png;base64,{img_base64}\", # noqa: E501\n",
" \"url\": f\"data:image/png;base64,{img_base64}\",\n",
" },\n",
" },\n",
" {\"type\": \"text\", \"text\": \"What is this logo for?\"},\n",
4 changes: 2 additions & 2 deletions docs/docs/integrations/chat_loaders/discord.ipynb
@@ -95,7 +95,7 @@
" \"\"\"\n",
" self.path = path\n",
" self._message_line_regex = re.compile(\n",
" r\"(.+?) — (\\w{3,9} \\d{1,2}(?:st|nd|rd|th)?(?:, \\d{4})? \\d{1,2}:\\d{2} (?:AM|PM)|Today at \\d{1,2}:\\d{2} (?:AM|PM)|Yesterday at \\d{1,2}:\\d{2} (?:AM|PM))\", # noqa\n",
" r\"(.+?) — (\\w{3,9} \\d{1,2}(?:st|nd|rd|th)?(?:, \\d{4})? \\d{1,2}:\\d{2} (?:AM|PM)|Today at \\d{1,2}:\\d{2} (?:AM|PM)|Yesterday at \\d{1,2}:\\d{2} (?:AM|PM))\",\n",
" flags=re.DOTALL,\n",
" )\n",
"\n",
@@ -120,7 +120,7 @@
" current_content = []\n",
" for line in lines:\n",
" if re.match(\n",
" r\".+? — (\\d{2}/\\d{2}/\\d{4} \\d{1,2}:\\d{2} (?:AM|PM)|Today at \\d{1,2}:\\d{2} (?:AM|PM)|Yesterday at \\d{1,2}:\\d{2} (?:AM|PM))\", # noqa\n",
" r\".+? — (\\d{2}/\\d{2}/\\d{4} \\d{1,2}:\\d{2} (?:AM|PM)|Today at \\d{1,2}:\\d{2} (?:AM|PM)|Yesterday at \\d{1,2}:\\d{2} (?:AM|PM))\",\n",
" line,\n",
" ):\n",
" if current_sender and current_content:\n",
2 changes: 1 addition & 1 deletion docs/docs/integrations/chat_loaders/wechat.ipynb
@@ -94,7 +94,7 @@
" \"\"\"\n",
" self.path = path\n",
" self._message_line_regex = re.compile(\n",
" r\"(?P<sender>.+?) (?P<timestamp>\\d{4}/\\d{2}/\\d{2} \\d{1,2}:\\d{2} (?:AM|PM))\", # noqa\n",
" r\"(?P<sender>.+?) (?P<timestamp>\\d{4}/\\d{2}/\\d{2} \\d{1,2}:\\d{2} (?:AM|PM))\",\n",
" # flags=re.DOTALL,\n",
" )\n",
"\n",
@@ -3,7 +3,7 @@ def __init__(self, name):
self.name = name

def greet(self):
print(f"Hello, {self.name}!") # noqa: T201
print(f"Hello, {self.name}!")


def main():
2 changes: 1 addition & 1 deletion docs/scripts/arxiv_references.py
@@ -389,7 +389,7 @@ def get_papers(
Returns:
List of ArxivPaper objects.
""" # noqa: E501
"""

def cut_authors(authors: list) -> list[str]:
if len(authors) > 3:
2 changes: 1 addition & 1 deletion docs/scripts/generate_api_reference_links.py
@@ -186,7 +186,7 @@ def replacer(match):
data = code_block_re.sub(replacer, data)

# if all_imports:
# print(f"Adding {len(all_imports)} links for imports in {file}") # noqa: T201
# print(f"Adding {len(all_imports)} links for imports in {file}")
with open(file, "w") as f:
f.write(data)
return all_imports
4 changes: 2 additions & 2 deletions docs/scripts/model_feat_table.py
@@ -116,7 +116,7 @@
{table}
""" # noqa: E501
"""

CHAT_MODEL_TEMPLATE = """\
---
@@ -133,7 +133,7 @@
{table}
""" # noqa: E501
"""


def get_llm_table():
2 changes: 1 addition & 1 deletion libs/cli/tests/unit_tests/migrate/test_replace_imports.py
@@ -17,7 +17,7 @@
"langchain_to_core",
"community_to_core",
]
-) # type: ignore[attr-defined] # noqa: E501
+) # type: ignore[attr-defined]


class TestReplaceImportsCommand(CodemodTest):
2 changes: 1 addition & 1 deletion libs/community/langchain_community/callbacks/manager.py
@@ -28,7 +28,7 @@
wandb_tracing_callback_var: ContextVar[Optional[WandbTracer]] = ContextVar(
"tracing_wandb_callback", default=None
)
-comet_tracing_callback_var: ContextVar[Optional[CometTracer]] = ContextVar(  # noqa: E501
+comet_tracing_callback_var: ContextVar[Optional[CometTracer]] = ContextVar(
"tracing_comet_callback", default=None
)

@@ -143,7 +143,7 @@ def lazy_load(self) -> Iterator[ChatSession]:
:return: Iterator of chat sessions containing messages.
"""
-from langchain_community.adapters import openai as oai_adapter  # noqa: E402
+from langchain_community.adapters import openai as oai_adapter

data = self.client.read_dataset_openai_finetuning(
dataset_name=self.dataset_name
@@ -62,7 +62,7 @@ def __init__(

self.messages: List[BaseMessage] = []
try:
-from azure.cosmos import (  # pylint: disable=import-outside-toplevel  # noqa: E501
+from azure.cosmos import (  # pylint: disable=import-outside-toplevel
CosmosClient,
)
except ImportError as exc:
@@ -91,7 +91,7 @@ def prepare_cosmos(self) -> None:
Use this function or the context manager to make sure your database is ready.
"""
try:
-from azure.cosmos import (  # pylint: disable=import-outside-toplevel  # noqa: E501
+from azure.cosmos import (  # pylint: disable=import-outside-toplevel
PartitionKey,
)
except ImportError as exc:
@@ -128,7 +128,7 @@ def load_messages(self) -> None:
if not self._container:
raise ValueError("Container not initialized")
try:
-from azure.cosmos.exceptions import (  # pylint: disable=import-outside-toplevel  # noqa: E501
+from azure.cosmos.exceptions import (  # pylint: disable=import-outside-toplevel
CosmosHttpResponseError,
)
except ImportError as exc:
@@ -85,7 +85,7 @@ class AzureChatOpenAI(ChatOpenAI):
For more:
https://www.microsoft.com/en-us/security/business/identity-access/microsoft-entra-id.
""" # noqa: E501
"""
azure_ad_token_provider: Union[Callable[[], str], None] = None
"""A function that returns an Azure Active Directory token.
@@ -233,7 +233,7 @@ class AzureMLChatOnlineEndpoint(BaseChatModel, AzureMLBaseEndpoint):
endpoint_api_key="my-api-key",
content_formatter=chat_content_formatter,
)
""" # noqa: E501
"""

@property
def _identifying_params(self) -> Dict[str, Any]:
@@ -214,7 +214,7 @@
GitHubIssuesLoader,
)
from langchain_community.document_loaders.glue_catalog import (
-GlueCatalogLoader,  # noqa: F401
+GlueCatalogLoader,
)
from langchain_community.document_loaders.google_speech_to_text import (
GoogleSpeechToTextLoader,
@@ -332,8 +332,8 @@
OracleAutonomousDatabaseLoader,
)
from langchain_community.document_loaders.oracleai import (
-OracleDocLoader,  # noqa: F401
-OracleTextSplitter,  # noqa: F401
+OracleDocLoader,
+OracleTextSplitter,
)
from langchain_community.document_loaders.org_mode import (
UnstructuredOrgModeLoader,
@@ -31,7 +31,7 @@ class BlackboardLoader(WebBaseLoader):
)
documents = loader.load()
""" # noqa: E501
"""

def __init__(
self,
@@ -34,7 +34,7 @@ class EverNoteLoader(BaseLoader):
notes into a single long Document.
If this is set to True (default) then the only metadata on the document will be
the 'source' which contains the file name of the export.
""" # noqa: E501
"""

def __init__(self, file_path: Union[str, Path], load_single_document: bool = True):
"""Initialize with file path."""
6 changes: 3 additions & 3 deletions libs/community/langchain_community/document_loaders/pebblo.py
@@ -208,9 +208,9 @@ def _classify_doc(self, loaded_docs: list, loading_end: bool = False) -> list:
if loading_end is True:
payload["loading_end"] = "true"
if "loader_details" in payload:
payload["loader_details"]["source_aggregate_size"] = ( # noqa
self.source_aggregate_size
)
payload["loader_details"][
"source_aggregate_size"
] = self.source_aggregate_size
payload = Doc(**payload).dict(exclude_unset=True)
load_doc_url = f"{self.classifier_url}{LOADER_DOC_URL}"
classified_docs = []
@@ -81,7 +81,7 @@
"GoogleTranslateTransformer": "langchain_community.document_transformers.google_translate", # noqa: E501
"Html2TextTransformer": "langchain_community.document_transformers.html2text",
"LongContextReorder": "langchain_community.document_transformers.long_context_reorder", # noqa: E501
"MarkdownifyTransformer": "langchain_community.document_transformers.markdownify", # noqa: E501
"MarkdownifyTransformer": "langchain_community.document_transformers.markdownify",
"NucliaTextTransformer": "langchain_community.document_transformers.nuclia_text_transform", # noqa: E501
"OpenAIMetadataTagger": "langchain_community.document_transformers.openai_functions", # noqa: E501
"get_stateful_documents": "langchain_community.document_transformers.embeddings_redundant_filter", # noqa: E501
@@ -57,7 +57,7 @@ def transform_documents(
new_documents = []

for document in documents:
-extracted_metadata: Dict = self.tagging_chain.run(document.page_content)  # type: ignore[assignment]  # noqa: E501
+extracted_metadata: Dict = self.tagging_chain.run(document.page_content)  # type: ignore[assignment]
new_document = Document(
page_content=document.page_content,
metadata={**extracted_metadata, **document.metadata},
2 changes: 1 addition & 1 deletion libs/community/langchain_community/embeddings/__init__.py
@@ -173,7 +173,7 @@
QuantizedBiEncoderEmbeddings,
)
from langchain_community.embeddings.oracleai import (
-OracleEmbeddings,  # noqa: F401
+OracleEmbeddings,
)
from langchain_community.embeddings.premai import (
PremAIEmbeddings,
@@ -44,7 +44,7 @@ class AzureOpenAIEmbeddings(OpenAIEmbeddings):
For more:
https://www.microsoft.com/en-us/security/business/identity-access/microsoft-entra-id.
""" # noqa: E501
"""
azure_ad_token_provider: Union[Callable[[], str], None] = None
"""A function that returns an Azure Active Directory token.
@@ -25,7 +25,7 @@ class ElasticsearchEmbeddings(Embeddings):
In Elasticsearch you need to have an embedding model loaded and deployed.
- https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-trained-model.html
- https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-deploy-models.html
""" # noqa: E501
"""

def __init__(
self,
2 changes: 1 addition & 1 deletion libs/community/langchain_community/embeddings/infinity.py
@@ -182,7 +182,7 @@ def _permute(
length_sorted_idx = np.argsort([-sorter(sen) for sen in texts])
texts_sorted = [texts[idx] for idx in length_sorted_idx]

-return texts_sorted, lambda unsorted_embeddings: [  # noqa E731
+return texts_sorted, lambda unsorted_embeddings: [  # E731
unsorted_embeddings[idx] for idx in np.argsort(length_sorted_idx)
]

2 changes: 1 addition & 1 deletion libs/community/langchain_community/embeddings/localai.py
@@ -254,7 +254,7 @@ def _invocation_params(self) -> Dict:
openai.proxy = {
"http": self.openai_proxy,
"https": self.openai_proxy,
-}  # type: ignore[assignment]  # noqa: E501
+}  # type: ignore[assignment]
return openai_args

def _embedding_func(self, text: str, *, engine: str) -> List[float]:
2 changes: 1 addition & 1 deletion libs/community/langchain_community/embeddings/openai.py
@@ -390,7 +390,7 @@ def _invocation_params(self) -> Dict[str, Any]:
openai.proxy = {
"http": self.openai_proxy,
"https": self.openai_proxy,
-}  # type: ignore[assignment]  # noqa: E501
+}  # type: ignore[assignment]
return openai_args

# please refer to
4 changes: 2 additions & 2 deletions libs/community/langchain_community/embeddings/yandex.py
@@ -107,13 +107,13 @@ def validate_environment(cls, values: Dict) -> Dict:
raise ValueError("'doc_model_uri' or 'folder_id' must be provided.")
values[
"doc_model_uri"
] = f"emb://{values['folder_id']}/{values['doc_model_name']}/{values['model_version']}" # noqa: E501
] = f"emb://{values['folder_id']}/{values['doc_model_name']}/{values['model_version']}"
if not values.get("model_uri"):
if values["folder_id"] == "":
raise ValueError("'model_uri' or 'folder_id' must be provided.")
values[
"model_uri"
] = f"emb://{values['folder_id']}/{values['model_name']}/{values['model_version']}" # noqa: E501
] = f"emb://{values['folder_id']}/{values['model_name']}/{values['model_version']}"
if values["disable_request_logging"]:
values["_grpc_metadata"].append(
(
8 changes: 4 additions & 4 deletions libs/community/langchain_community/graphs/arangodb_graph.py
@@ -174,9 +174,9 @@ def get_arangodb_client(
"Unable to import arango, please install with `pip install python-arango`."
) from e

-_url: str = url or os.environ.get("ARANGODB_URL", "http://localhost:8529")  # type: ignore[assignment]  # noqa: E501
-_dbname: str = dbname or os.environ.get("ARANGODB_DBNAME", "_system")  # type: ignore[assignment]  # noqa: E501
-_username: str = username or os.environ.get("ARANGODB_USERNAME", "root")  # type: ignore[assignment]  # noqa: E501
-_password: str = password or os.environ.get("ARANGODB_PASSWORD", "")  # type: ignore[assignment]  # noqa: E501
+_url: str = url or os.environ.get("ARANGODB_URL", "http://localhost:8529")  # type: ignore[assignment]
+_dbname: str = dbname or os.environ.get("ARANGODB_DBNAME", "_system")  # type: ignore[assignment]
+_username: str = username or os.environ.get("ARANGODB_USERNAME", "root")  # type: ignore[assignment]
+_password: str = password or os.environ.get("ARANGODB_PASSWORD", "")  # type: ignore[assignment]

return ArangoClient(_url).db(_dbname, _username, _password, verify=True)
@@ -496,7 +496,7 @@ class AzureMLOnlineEndpoint(BaseLLM, AzureMLBaseEndpoint):
timeout=120,
content_formatter=content_formatter,
)
""" # noqa: E501
"""

@property
def _identifying_params(self) -> Mapping[str, Any]:
[Diff truncated: the remaining changed files are not shown.]
