14 changes: 14 additions & 0 deletions .github/workflows/lint.yml
@@ -0,0 +1,14 @@
name: pre-commit

on:
  pull_request:
  push:
    branches: [master]

jobs:
  pre-commit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
      - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1
37 changes: 37 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,37 @@
# pre-commit is a tool to perform a predefined set of tasks manually and/or
# automatically before git commits are made.
#
# Config reference: https://pre-commit.com/#pre-commit-configyaml---top-level
#
# Common tasks
#
# - Register git hooks: pre-commit install
# - Run on all files: pre-commit run --all-files
#
# These pre-commit hooks are run as CI.
#
# NOTE: if it can be avoided, add configs/args in pyproject.toml or below instead of creating a new `.config.file`.
# https://pre-commit.ci/#configuration
ci:
  autoupdate_schedule: monthly
  autofix_commit_msg: |
    [pre-commit.ci] Apply automatic pre-commit fixes

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.6.0
    hooks:
      - id: end-of-file-fixer
        exclude: '\.(svg|patch)$'
      - id: trailing-whitespace
        exclude: '\.(svg|patch)$'
      - id: check-json
      - id: check-yaml
        args: [--allow-multiple-documents, --unsafe]
      - id: check-toml

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.5.6
    hooks:
      - id: ruff
        args: ["--fix"]
1 change: 0 additions & 1 deletion WORKSPACE
@@ -159,4 +159,3 @@ load(
)

nccl_configure(name = "local_config_nccl")

90 changes: 90 additions & 0 deletions ruff.toml
@@ -0,0 +1,90 @@
line-length = 88

[lint]

select = [
    # pycodestyle
    "E",
    "W",
    # Pyflakes
    "F",
    # pyupgrade
    "UP",
    # flake8-bugbear
    "B",
    # flake8-simplify
    "SIM",
    # isort
    "I",
    # pep8-naming
    "N",
    # pydocstyle
    "D",
    # flake8-annotations
    "ANN",
    # flake8-debugger
    "T10",
    # flake8-pytest-style
    "PT",
    # flake8-return
    "RET",
    # flake8-unused-arguments
    "ARG",
    # flake8-fixme
    "FIX",
    # flake8-eradicate
    "ERA",
    # pandas-vet
    "PD",
    # NumPy-specific rules
    "NPY",
]

ignore = [
    "D104",   # Missing docstring in public package
    "D100",   # Missing docstring in public module
    "D211",   # No blank lines allowed before class docstring
    "PD901",  # Avoid using 'df' for pandas DataFrames; perfectly fine in functions with limited scope
    "ANN201", # Missing return type annotation for public function (of little value for functions returning None)
    "ANN101", # Missing type annotation for `self`
    "ANN204", # Missing return type annotation for special method
    "ANN002", # Missing type annotation for `*args`
    "ANN003", # Missing type annotation for `**kwargs`
    "D105",   # Missing docstring in magic method
    "D203",   # 1 blank line required before class docstring
    "D204",   # 1 blank line required after class docstring
    "D413",   # Missing blank line after last docstring section
    "SIM108", # Simplify if/else to one line; not always clearer
    "D206",   # Docstring should be indented with spaces, not tabs; unnecessary when running ruff-format
    "E501",   # Line too long; unnecessary when running ruff-format
    "W191",   # Indentation contains tabs; unnecessary when running ruff-format

    # FIX AND REMOVE BELOW CODES:
    "ANN001", # Missing type annotation for function argument
    "ANN102", # Missing type annotation for `cls` in classmethod
    "ANN202", # Missing return type annotation for private function
    "ANN205", # Missing return type annotation for staticmethod
    "ANN206", # Missing return type annotation for classmethod
    "D102",   # Missing docstring in public method
    "D103",   # Missing docstring in public function
    "D107",   # Missing docstring in `__init__`
    "E402",   # Module level import not at top of file
    "ERA001", # Found commented-out code
    "F401",   # `module` imported but unused
    "F405",   # Name may be undefined, or defined from star imports
    "FIX002", # Line contains TODO, consider resolving the issue
    "N802",   # Function name should be lowercase
    "PT009",  # Use a regular `assert` instead of unittest-style `assertEqual` / `assertIsInstance`
    "PT027",  # Use `pytest.raises` instead of unittest-style `assertRaisesRegex`
    "UP028",  # Replace `yield` over `for` loop with `yield from`
    "UP029",  # Unnecessary builtin import
    "RET503", # Missing explicit `return` at the end of function able to return non-`None` value
]

[lint.pyupgrade]
# Preserve types, even if a file imports `from __future__ import annotations`.
# Remove when Python 3.9 is no longer supported
keep-runtime-typing = true

[lint.pydocstyle]
convention = "google"
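Reviewer note: most of the remaining diff silences existing findings with per-line `# noqa: <code>` comments rather than rewriting generated or legacy code. A minimal sketch of how those suppressions interact with the rule selection above (hypothetical file, not part of this PR; behaviour as documented for ruff 0.5.x):

```python
"""Hypothetical module illustrating per-line suppression of selected ruff rules."""
# As in the generated files below, `# noqa: I001` on the first import silences the
# unsorted-imports diagnostic that the isort rules (I) report for the whole import block.
import grpc  # noqa: I001
import os  # F401 (unused import) would normally fire here, but F401 is in the ignore list above


# UP004 flags the Python 2-style `object` base class; ARG002 flags the unused arguments.
class LegacyStub(object):  # noqa: UP004
    """Placeholder stub mirroring the generated gRPC classes touched in this PR."""

    def call(self, request, context):  # noqa: ARG002
        """Raise until the method is implemented."""
        raise NotImplementedError
```

Suppressed findings can still be surfaced later with `ruff check --ignore-noqa`, so they can be revisited without touching this config.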
6 changes: 3 additions & 3 deletions tensorflow_serving/apis/model_service_pb2.py
@@ -17,9 +17,9 @@
# To regenerate run
# python -m grpc.tools.protoc --python_out=. --grpc_python_out=. -I. tensorflow_serving/apis/model_service.proto

import sys
import sys # noqa: I001
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor as _descriptor # noqa: I001
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
@@ -29,7 +29,7 @@
_sym_db = _symbol_database.Default()


from tensorflow_serving.apis import get_model_status_pb2 as tensorflow__serving_dot_apis_dot_get__model__status__pb2
from tensorflow_serving.apis import get_model_status_pb2 as tensorflow__serving_dot_apis_dot_get__model__status__pb2 # noqa: I001
from tensorflow_serving.apis import model_management_pb2 as tensorflow__serving_dot_apis_dot_model__management__pb2

DESCRIPTOR = _descriptor.FileDescriptor(
18 changes: 9 additions & 9 deletions tensorflow_serving/apis/model_service_pb2_grpc.py
@@ -17,16 +17,16 @@
# To regenerate run
# python -m grpc.tools.protoc --python_out=. --grpc_python_out=. -I. tensorflow_serving/apis/model_service.proto

import grpc
import grpc # noqa: I001

from tensorflow_serving.apis import get_model_status_pb2 as tensorflow__serving_dot_apis_dot_get__model__status__pb2
from tensorflow_serving.apis import model_management_pb2 as tensorflow__serving_dot_apis_dot_model__management__pb2


class ModelServiceStub(object):
class ModelServiceStub(object): # noqa: UP004
"""ModelService provides methods to query and update the state of the server,
e.g. which models/versions are being served.
"""
""" # noqa: D205

def __init__(self, channel):
"""Constructor.
@@ -50,26 +50,26 @@ def __init__(self, channel):
    )


class ModelServiceServicer(object):
class ModelServiceServicer(object):  # noqa: UP004
  """ModelService provides methods to query and update the state of the server,
  e.g. which models/versions are being served.
  """
  """  # noqa: D205

  def GetModelStatus(self, request, context):
  def GetModelStatus(self, request, context):  # noqa: ARG002
    """Gets status of model. If the ModelSpec in the request does not specify
    version, information about all versions of the model will be returned. If
    the ModelSpec in the request does specify a version, the status of only
    that version will be returned.
    """
    """  # noqa: D205
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def HandleReloadConfigRequest(self, request, context):
  def HandleReloadConfigRequest(self, request, context):  # noqa: ARG002
    """Reloads the set of served models. The new config supersedes the old one,
    so if a model is omitted from the new config it will be unloaded and no
    longer served.
    """
    """  # noqa: D205
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
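The generated servicer above only answers UNIMPLEMENTED; a real server subclasses it and registers the subclass with the generated `add_ModelServiceServicer_to_server` helper. A rough sketch of that wiring (the port, worker count, and the always-AVAILABLE status are illustrative assumptions, not part of this PR):

```python
from concurrent import futures

import grpc
from tensorflow_serving.apis import get_model_status_pb2, model_service_pb2_grpc


class ToyModelService(model_service_pb2_grpc.ModelServiceServicer):
  """Toy servicer that reports every requested model version as AVAILABLE."""

  def GetModelStatus(self, request, context):
    response = get_model_status_pb2.GetModelStatusResponse()
    status = response.model_version_status.add()
    status.version = request.model_spec.version.value or 1  # fall back to version 1 if unset
    status.state = get_model_status_pb2.ModelVersionStatus.AVAILABLE
    return response


def serve(port=8500):  # placeholder port
  server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
  model_service_pb2_grpc.add_ModelServiceServicer_to_server(ToyModelService(), server)
  server.add_insecure_port(f"[::]:{port}")
  server.start()
  server.wait_for_termination()
```

`HandleReloadConfigRequest` is left to the base class, so it keeps raising UNIMPLEMENTED as shown in the diff.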
6 changes: 3 additions & 3 deletions tensorflow_serving/apis/prediction_service_pb2.py
@@ -21,9 +21,9 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow_serving/apis/prediction_service.proto

import sys
import sys # noqa: I001
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor as _descriptor # noqa: I001
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
@@ -33,7 +33,7 @@
_sym_db = _symbol_database.Default()


from tensorflow_serving.apis import classification_pb2 as tensorflow__serving_dot_apis_dot_classification__pb2
from tensorflow_serving.apis import classification_pb2 as tensorflow__serving_dot_apis_dot_classification__pb2 # noqa: I001
from tensorflow_serving.apis import get_model_metadata_pb2 as tensorflow__serving_dot_apis_dot_get__model__metadata__pb2
from tensorflow_serving.apis import inference_pb2 as tensorflow__serving_dot_apis_dot_inference__pb2
from tensorflow_serving.apis import predict_pb2 as tensorflow__serving_dot_apis_dot_predict__pb2
30 changes: 15 additions & 15 deletions tensorflow_serving/apis/prediction_service_pb2_grpc.py
@@ -16,7 +16,7 @@
# source: tensorflow_serving/apis/prediction_service.proto
# To regenerate run
# python -m grpc.tools.protoc --python_out=. --grpc_python_out=. -I. tensorflow_serving/apis/prediction_service.proto
import grpc
import grpc # noqa: I001

from tensorflow_serving.apis import classification_pb2 as tensorflow__serving_dot_apis_dot_classification__pb2
from tensorflow_serving.apis import get_model_metadata_pb2 as tensorflow__serving_dot_apis_dot_get__model__metadata__pb2
@@ -25,11 +25,11 @@
from tensorflow_serving.apis import regression_pb2 as tensorflow__serving_dot_apis_dot_regression__pb2


class PredictionServiceStub(object):
class PredictionServiceStub(object):  # noqa: UP004
  """open source marker; do not remove
  PredictionService provides access to machine-learned models loaded by
  model_servers.
  """
  """  # noqa: D205

  def __init__(self, channel):
    """Constructor.
@@ -64,43 +64,43 @@ def __init__(self, channel):
    )


class PredictionServiceServicer(object):
class PredictionServiceServicer(object):  # noqa: UP004
  """open source marker; do not remove
  PredictionService provides access to machine-learned models loaded by
  model_servers.
  """
  """  # noqa: D205

  def Classify(self, request, context):
  def Classify(self, request, context):  # noqa: ARG002
    """Classify.
    """
    """  # noqa: D200
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def Regress(self, request, context):
  def Regress(self, request, context):  # noqa: ARG002
    """Regress.
    """
    """  # noqa: D200
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def Predict(self, request, context):
  def Predict(self, request, context):  # noqa: ARG002
    """Predict -- provides access to loaded TensorFlow model.
    """
    """  # noqa: D200
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def MultiInference(self, request, context):
  def MultiInference(self, request, context):  # noqa: ARG002
    """MultiInference API for multi-headed models.
    """
    """  # noqa: D200
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetModelMetadata(self, request, context):
  def GetModelMetadata(self, request, context):  # noqa: ARG002
    """GetModelMetadata - provides access to metadata for loaded models.
    """
    """  # noqa: D200
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
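On the client side, this stub is typically used over a plain gRPC channel to the model server. A minimal sketch, assuming a server on `localhost:8500` serving a model named `half_plus_two` with an input tensor `x` and the default signature (all of these names are assumptions for illustration, not taken from this PR):

```python
import grpc
import tensorflow as tf
from tensorflow_serving.apis import predict_pb2, prediction_service_pb2_grpc

channel = grpc.insecure_channel("localhost:8500")  # assumed server address
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

request = predict_pb2.PredictRequest()
request.model_spec.name = "half_plus_two"  # assumed model name
request.model_spec.signature_name = "serving_default"
request.inputs["x"].CopyFrom(
    tf.make_tensor_proto([[1.0, 2.0, 3.0]], dtype=tf.float32))  # assumed input name and shape

response = stub.Predict(request, timeout=10.0)  # blocking unary-unary call
print(response.outputs)
```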
@@ -13,7 +13,7 @@
# limitations under the License.
# ==============================================================================

import tensorflow.compat.v1 as tf
import tensorflow.compat.v1 as tf # noqa: I001
FLAGS = tf.app.flags.FLAGS

tf.app.flags.DEFINE_string("output_dir", "/tmp/matrix_half_plus_two/1",
@@ -28,7 +28,7 @@ def _generate_saved_model_for_matrix_half_plus_two(export_dir):
  the result will be [[2.5, 3, 3.5], [4, 4.5, 5], [5.5, 6, 6.5]].
  Args:
    export_dir: The directory where to write SavedModel files.
  """
  """  # noqa: D205, D208, D214, D411
  builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
  with tf.Session() as session:
    x = tf.placeholder(tf.float32, shape=[None, 3, 3], name="x")
6 changes: 3 additions & 3 deletions tensorflow_serving/example/mnist_client.py
@@ -25,7 +25,7 @@
    mnist_client.py --num_tests=100 --server=localhost:9000
"""

from __future__ import print_function
from __future__ import print_function # noqa: I001, UP010

import sys
import threading
@@ -48,7 +48,7 @@
FLAGS = tf.compat.v1.app.flags.FLAGS


class _ResultCounter(object):
class _ResultCounter(object):  # noqa: UP004
  """Counter for the prediction results."""

  def __init__(self, num_tests, concurrency):
@@ -94,7 +94,7 @@ def _create_rpc_callback(label, result_counter):
    result_counter: Counter for the prediction result.
  Returns:
    The callback function.
  """
  """  # noqa: D410, D411
  def _callback(result_future):
    """Callback function.

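The truncated hunk above belongs to mnist_client's asynchronous request path: `_create_rpc_callback` returns a closure that gRPC invokes once a `Predict.future(...)` call completes. A stripped-down sketch of that pattern (the server address, output tensor name, and label are illustrative, not taken from this file):

```python
import grpc
import tensorflow as tf
from tensorflow_serving.apis import predict_pb2, prediction_service_pb2_grpc


def _make_callback(label):
  """Return a done-callback that compares the prediction against `label`."""
  def _callback(result_future):
    exception = result_future.exception()
    if exception is not None:
      print("RPC failed:", exception)
      return
    response = result_future.result()
    scores = tf.make_ndarray(response.outputs["scores"])  # assumed output tensor name
    print("label:", label, "prediction:", scores.argmax())
  return _callback


channel = grpc.insecure_channel("localhost:8500")  # assumed server address
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
request = predict_pb2.PredictRequest()
request.model_spec.name = "mnist"  # assumed model name
# ... fill request.inputs["images"] with a test image, as the full client does ...
result_future = stub.Predict.future(request, 5.0)  # non-blocking call with a 5-second deadline
result_future.add_done_callback(_make_callback(7))
```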