ruff
AlongWY committed Jun 27, 2023
1 parent 786137d commit 5899505
Showing 23 changed files with 30 additions and 215 deletions.
22 changes: 0 additions & 22 deletions .github/workflows/code-quality-main.yaml

This file was deleted.

36 changes: 0 additions & 36 deletions .github/workflows/code-quality-pr.yaml

This file was deleted.

11 changes: 11 additions & 0 deletions .github/workflows/ruff.yaml
@@ -0,0 +1,11 @@
# Same as `code-quality-pr.yaml` but triggered on commit to main branch
# and runs on all files (instead of only the changed ones)

name: Ruff
on: [ push, pull_request ]
jobs:
  ruff:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: chartboost/ruff-action@v1
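
The new workflow hands linting to chartboost/ruff-action in place of the pre-commit hooks and code-quality workflows deleted in this commit, and the remaining hunks below are the kind of cleanup ruff reports out of the box: unused imports (pyflakes rule F401) and bindings that are assigned but never read (F841). A minimal sketch of both patterns, with placeholder names rather than code from this repository:

    # Minimal sketch of the two cleanup patterns seen throughout this commit;
    # the names here are placeholders, not files from this repository.
    import json  # F401: imported but never used


    def summarize(text: str) -> dict:
        try:
            stats = {"length": len(text)}
            shouted = text.upper()  # F841: assigned but never read
            return stats
        except Exception as e:  # the unused binding `e` is flagged the same way
            return {}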
122 changes: 0 additions & 122 deletions .pre-commit-config.yaml

This file was deleted.

6 changes: 2 additions & 4 deletions python/core/ltp_core/algorithms/__init__.py
@@ -1,7 +1,6 @@
try:
import torch
from ltp_extension.algorithms import eisner as rust_eisner
from ltp_extension.algorithms import get_entities

def eisner(scores, mask, remove_root=False):

@@ -19,6 +18,5 @@ def eisner(scores, mask, remove_root=False):

return result

except Exception as e:
from .eisner import eisner
from .get_entities import get_entities
except Exception:
pass
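
Read as a whole, this change keeps the optional-extension pattern but drops the pure-Python fallback: import failures are now ignored rather than rerouted to the local .eisner / .get_entities modules. A condensed sketch of how the file reads afterwards, assuming the collapsed lines are unchanged:

    # Condensed sketch of how the file reads after this hunk (assumption:
    # the lines hidden by the collapsed section are unchanged).
    try:
        import torch
        from ltp_extension.algorithms import eisner as rust_eisner
        from ltp_extension.algorithms import get_entities

        def eisner(scores, mask, remove_root=False):
            # thin wrapper; the real conversion logic is elided in the diff,
            # so this direct call is illustrative only
            return rust_eisner(scores, mask, remove_root)

    except Exception:
        pass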
2 changes: 0 additions & 2 deletions python/core/ltp_core/datamodules/__init__.py
@@ -1,2 +0,0 @@
from .multi_task_datamodule import MultiTaskDataModule
from .task_datamodule import TaskDataModule
@@ -6,4 +6,4 @@ def build_dataset(task_name):
import os

os.environ["TOKENIZERS_PARALLELISM"] = "true"
raw_datasets = load_dataset("glue", task_name)
load_dataset("glue", task_name)
2 changes: 1 addition & 1 deletion python/core/ltp_core/datamodules/components/conllu.py
@@ -70,7 +70,7 @@ def build_vocabs(data_dir, *files, min_freq=5):
for dep in values[row]
]
counter.update(itertools.chain(*deps))
except Exception as e:
except Exception:
counter.update("_")
else:
counter.update(values[row])
2 changes: 1 addition & 1 deletion python/core/ltp_core/datamodules/utils/collate.py
@@ -32,7 +32,7 @@ def collate(batch):
storage = elem.storage()._new_shared(numel, device=elem.device)
out = elem.new(storage).resize_(len(batch), *list(elem.size()))
return torch.stack(batch, 0, out=out)
except Exception as e:
except Exception:
return torch.nn.utils.rnn.pad_sequence(batch, batch_first=True)
elif (
elem_type.__module__ == "numpy"
2 changes: 1 addition & 1 deletion python/core/ltp_core/models/criterion/token.py
@@ -5,7 +5,7 @@
from torch.nn import CrossEntropyLoss, Module

from ltp_core.models.components.token import TokenClassifierResult
from ltp_core.models.functional.distill import kd_ce_loss, kd_mse_loss
from ltp_core.models.functional.distill import kd_ce_loss


class TokenLoss(Module):
2 changes: 1 addition & 1 deletion python/core/ltp_core/models/lit_model.py
@@ -243,7 +243,7 @@ def main():
print(metric)

model.on_epoch_end()
optimizer = model.configure_optimizers()
model.configure_optimizers()


if __name__ == "__main__":
2 changes: 0 additions & 2 deletions python/core/ltp_core/models/utils/__init__.py
@@ -1,2 +0,0 @@
from ltp_core.models.utils.instantiate import instantiate, instantiate_omega
from ltp_core.models.utils.transformer import load_transformers
12 changes: 0 additions & 12 deletions python/core/ltp_core/utils/__init__.py
@@ -1,12 +0,0 @@
from ltp_core.utils.pylogger import get_pylogger
from ltp_core.utils.rich_utils import enforce_tags, print_config_tree
from ltp_core.utils.utils import (
close_loggers,
extras,
get_metric_value,
instantiate_callbacks,
instantiate_loggers,
log_hyperparameters,
save_file,
task_wrapper,
)
2 changes: 1 addition & 1 deletion python/core/ltp_core/utils/utils.py
@@ -2,7 +2,7 @@
import warnings
from importlib.util import find_spec
from pathlib import Path
from typing import Any, Callable, Dict, List
from typing import Callable, List

import hydra
from omegaconf import DictConfig
2 changes: 1 addition & 1 deletion python/extension/examples/legacy_train.py
@@ -1,4 +1,4 @@
from ltp_extension.perceptron import Algorithm, CWSModel, CWSTrainer, Model, ModelType, Trainer
from ltp_extension.perceptron import Algorithm, CWSModel, CWSTrainer, ModelType, Trainer


def train_cws():
5 changes: 3 additions & 2 deletions python/extension/src/perceptron/model.rs
@@ -165,6 +165,7 @@ impl PyModel {
Ok(())
}

#[pyo3(signature = ( * args, parallelism = true))]
pub fn __call__(&self, py: Python, args: &PyTuple, parallelism: bool) -> PyResult<PyObject> {
let first = args.get_item(0)?;
let is_single = match &self.model {
@@ -205,7 +206,7 @@ impl PyModel {
}

/// Predict a sentence
#[pyo3(text_signature = "(self, *args)")]
#[pyo3(text_signature = "(self, *args)", signature = ( * args))]
pub fn predict(&self, py: Python, args: &PyTuple) -> PyResult<PyObject> {
Ok(match &self.model {
EnumModel::CWS(model) => {
@@ -246,7 +247,7 @@ impl PyModel {
}

/// Predict batched sentences
#[pyo3(text_signature = "(self, *args, parallelism = True)")]
#[pyo3(text_signature = "(self, *args, parallelism = True)", signature = ( * args, parallelism = true))]
pub fn batch_predict(
&self,
py: Python,
3 changes: 2 additions & 1 deletion python/extension/src/perceptron/specialization/cws.rs
@@ -123,6 +123,7 @@ impl PyCWSModel {
Ok(())
}

#[pyo3(signature = ( * args, parallelism = true))]
pub fn __call__(&self, py: Python, args: &PyTuple, parallelism: bool) -> PyResult<PyObject> {
let first = args.get_item(0)?;
let is_single = match first.get_type().name()? {
@@ -156,7 +157,7 @@ impl PyCWSModel {
}

/// Predict batched sentences
#[pyo3(text_signature = "(self, batch_text, parallelism=True)")]
#[pyo3(text_signature = "(self, batch_text, parallelism=True)", signature = (batch_text, parallelism = true))]
pub fn batch_predict(
&self,
py: Python,
1 change: 1 addition & 0 deletions python/extension/src/perceptron/specialization/ner.rs
@@ -24,6 +24,7 @@ impl PyNERModel {
Ok(Self::inner_load(path)?)
}

#[pyo3(signature = ( * args, parallelism = true))]
pub fn __call__(&self, py: Python, args: &PyTuple, parallelism: bool) -> PyResult<PyObject> {
let first = args.get_item(0)?;
let is_single = match first.get_type().name()? {
1 change: 1 addition & 0 deletions python/extension/src/perceptron/specialization/pos.rs
@@ -24,6 +24,7 @@ impl PyPOSModel {
Ok(Self::inner_load(path)?)
}

#[pyo3(signature = ( * args, parallelism = true))]
pub fn __call__(&self, py: Python, args: &PyTuple, parallelism: bool) -> PyResult<PyObject> {
let first = args.get_item(0)?;
let is_single = match first.get_type().name()? {
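
The pattern repeated across model.rs, cws.rs, ner.rs and pos.rs is the same: each exported method that takes *args or a defaulted keyword gains an explicit #[pyo3(signature = ...)] attribute, since newer PyO3 releases expect the argument-parsing behaviour to be spelled out there, while text_signature only affects the generated docstring. A hedged sketch of the Python-side call shapes these attributes declare; the loader and its argument are placeholders, since constructing a model is not part of this diff:

    # Hedged sketch; Model and the *args/parallelism shapes come from the hunks
    # above, while load() and the "tiny.model" path are assumptions made only
    # for illustration.
    from ltp_extension.perceptron import Model

    model = Model.load("tiny.model")  # assumed loader, not shown in this diff
    # the positional arguments below are placeholders; what each model expects
    # is not shown in this commit
    out = model("一个句子", parallelism=True)   # __call__(self, *args, parallelism=True)
    one = model.predict("一个句子")             # predict(self, *args)
    many = model.batch_predict(["句子一", "句子二"], parallelism=False)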
2 changes: 1 addition & 1 deletion python/extension/src/stnsplit.rs
@@ -39,7 +39,7 @@ impl StnSplit {
}

/// batch split to sentences
#[pyo3(text_signature = "(self, batch_text, threads=8)")]
#[pyo3(text_signature = "(self, batch_text, threads=8)", signature = (batch_text, threads = 8))]
pub fn batch_split(
&self,
py: Python,
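
StnSplit.batch_split gets the same treatment: the documented (self, batch_text, threads=8) shape is now backed by a matching signature attribute. A small usage sketch; the import path and the zero-argument constructor are assumptions, since only batch_split itself appears in this diff:

    # Hedged sketch; only batch_split(batch_text, threads=8) is taken from this
    # diff -- the import path and the StnSplit() constructor are assumed.
    from ltp import StnSplit

    sents = StnSplit().batch_split(
        ["汤姆生病了。他去了医院。", "他叫汤姆去拿外衣。"],
        threads=2,  # defaults to 8 per the documented signature
    )
    print(sents)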
1 change: 0 additions & 1 deletion python/interface/examples/conllu.py
@@ -1,7 +1,6 @@
#! /usr/bin/env python
# Author: Yunlong Feng <[email protected]>

from collections import namedtuple

from ltp import LTP

2 changes: 1 addition & 1 deletion python/interface/ltp/legacy.py
@@ -7,7 +7,7 @@
from ltp.generic import LTPOutput
from ltp.mixin import ModelHubMixin
from ltp_extension.algorithms import Hook, get_entities
from ltp_extension.perceptron import CharacterType, CWSModel, NERModel, POSModel
from ltp_extension.perceptron import CWSModel, NERModel, POSModel


class LTP(ModelHubMixin):
3 changes: 1 addition & 2 deletions python/interface/ltp/mixin.py
@@ -1,7 +1,6 @@
import json
import os
from pathlib import Path
from typing import Dict, Optional, Union
from typing import Dict, Optional

from ltp.utils import get_pylogger

