[CI] Fix PyLint errors. #10837

Merged · 6 commits · Sep 24, 2024
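The changes follow one pattern across all three touched files: parameters flagged by PyLint (too-many-arguments, and too-many-positional-arguments, newly introduced in pylint 3.3) become keyword-only via a bare `*` in the signature; public entry points additionally get the `_deprecate_positional_args` decorator so existing positional callers see a deprecation warning rather than an immediate TypeError; and internal call sites are rewritten to pass keywords. A minimal, xgboost-free illustration of the bare-`*` rule:

def predict(data, *, output_margin=False):
    # Everything after the bare `*` must be passed by keyword.
    return data, output_margin

predict([1, 2], output_margin=True)  # OK
# predict([1, 2], True)  # TypeError: takes 1 positional argument but 2 were given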
16 changes: 13 additions & 3 deletions python-package/xgboost/callback.py
@@ -23,7 +23,13 @@
 import numpy

 from . import collective
-from .core import Booster, DMatrix, XGBoostError, _parse_eval_str
+from .core import (
+    Booster,
+    DMatrix,
+    XGBoostError,
+    _deprecate_positional_args,
+    _parse_eval_str,
+)

 __all__ = [
     "TrainingCallback",
@@ -346,8 +352,10 @@ class EarlyStopping(TrainingCallback):
     """

     # pylint: disable=too-many-arguments
+    @_deprecate_positional_args
     def __init__(
         self,
+        *,
         rounds: int,
         metric_name: Optional[str] = None,
         data_name: Optional[str] = None,
@@ -375,7 +383,7 @@ def before_training(self, model: _Model) -> _Model:
         return model

     def _update_rounds(
-        self, score: _Score, name: str, metric: str, model: _Model, epoch: int
+        self, *, score: _Score, name: str, metric: str, model: _Model, epoch: int
     ) -> bool:
         def get_s(value: _Score) -> float:
             """get score if it's cross validation history."""
@@ -471,7 +479,9 @@ def after_iteration(

         # The latest score
         score = data_log[metric_name][-1]
-        return self._update_rounds(score, data_name, metric_name, model, epoch)
+        return self._update_rounds(
+            score=score, name=data_name, metric=metric_name, model=model, epoch=epoch
+        )

     def after_training(self, model: _Model) -> _Model:
         if not self.save_best:
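For EarlyStopping, the net effect is that construction becomes keyword-only. A quick check, assuming an xgboost build that includes this patch:

import xgboost as xgb

# Supported: every argument is named.
es = xgb.callback.EarlyStopping(rounds=3, metric_name="rmse", save_best=True)

# Deprecated: passing `rounds` positionally now routes through
# _deprecate_positional_args and emits a warning.
# es = xgb.callback.EarlyStopping(3)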
7 changes: 6 additions & 1 deletion python-package/xgboost/core.py
@@ -905,7 +905,7 @@ def __init__(
             return

         handle, feature_names, feature_types = dispatch_data_backend(
-            data,
+            data=data,
             missing=self.missing,
             threads=self.nthread,
             feature_names=feature_names,
@@ -1692,6 +1692,7 @@ class ExtMemQuantileDMatrix(DMatrix):
     def __init__( # pylint: disable=super-init-not-called
         self,
         data: DataIter,
+        *,
         missing: Optional[float] = None,
         nthread: Optional[int] = None,
         max_bin: Optional[int] = None,
@@ -2343,9 +2344,11 @@ def eval(self, data: DMatrix, name: str = "eval", iteration: int = 0) -> str:
         return self.eval_set([(data, name)], iteration)

     # pylint: disable=too-many-function-args
+    @_deprecate_positional_args
     def predict(
         self,
         data: DMatrix,
+        *,
         output_margin: bool = False,
         pred_leaf: bool = False,
         pred_contribs: bool = False,
@@ -2478,9 +2481,11 @@ def assign_type(t: int) -> None:
         return _prediction_output(shape, dims, preds, False)

     # pylint: disable=too-many-statements
+    @_deprecate_positional_args
     def inplace_predict(
         self,
         data: DataType,
+        *,
         iteration_range: IterationRange = (0, 0),
         predict_type: str = "value",
         missing: float = np.nan,
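On the Booster itself, everything after `data` in predict() and inplace_predict() is now keyword-only. A small end-to-end check on synthetic data, again assuming a build with this patch:

import numpy as np
import xgboost as xgb

rng = np.random.default_rng(0)
dtrain = xgb.DMatrix(rng.random((64, 4)), label=rng.random(64))
booster = xgb.train({"objective": "reg:squarederror"}, dtrain, num_boost_round=2)

# Supported: flags after `data` are spelled out.
margin = booster.predict(dtrain, output_margin=True)

# Deprecated: booster.predict(dtrain, True) now warns before the
# positional argument is remapped to output_margin.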
45 changes: 39 additions & 6 deletions python-package/xgboost/dask/__init__.py
@@ -339,8 +339,8 @@ def __init__(

         self._init = client.sync(
             self._map_local_data,
-            client,
-            data,
+            client=client,
+            data=data,
             label=label,
             weights=weight,
             base_margin=base_margin,
@@ -355,6 +355,7 @@ def __await__(self) -> Generator:

     async def _map_local_data(
         self,
+        *,
         client: "distributed.Client",
         data: _DataT,
         label: Optional[_DaskCollection] = None,
@@ -589,6 +590,7 @@ def __init__(
         self,
         data: List[Any],
         label: Optional[List[Any]] = None,
+        *,
         weight: Optional[List[Any]] = None,
         base_margin: Optional[List[Any]] = None,
         qid: Optional[List[Any]] = None,
@@ -712,6 +714,7 @@ def _create_fn_args(self, worker_addr: str) -> Dict[str, Any]:


 def _create_quantile_dmatrix(
+    *,
     feature_names: Optional[FeatureNames],
     feature_types: Optional[Union[Any, List[Any]]],
     feature_weights: Optional[Any],
@@ -757,6 +760,7 @@ def _create_quantile_dmatrix(


 def _create_dmatrix(
+    *,
     feature_names: Optional[FeatureNames],
     feature_types: Optional[Union[Any, List[Any]]],
     feature_weights: Optional[Any],
@@ -927,6 +931,7 @@ def _get_dmatrices(


 async def _train_async(
+    *,
     client: "distributed.Client",
     global_config: Dict[str, Any],
     dconfig: Optional[Dict[str, Any]],
@@ -947,7 +952,7 @@ async def _train_async(
     _rabit_args = await _get_rabit_args(len(workers), dconfig, client)
     _check_distributed_params(params)

-    def dispatched_train(
+    def dispatched_train( # pylint: disable=too-many-positional-arguments
         parameters: Dict,
         rabit_args: Dict[str, Union[str, int]],
         train_id: int,
@@ -1115,6 +1120,7 @@ def _maybe_dataframe(


 async def _direct_predict_impl( # pylint: disable=too-many-branches
+    *,
     mapped_predict: Callable,
     booster: "distributed.Future",
     data: _DataT,
@@ -1249,6 +1255,7 @@ async def _predict_async(
     global_config: Dict[str, Any],
     model: Union[Booster, Dict, "distributed.Future"],
     data: _DataT,
+    *,
     output_margin: bool,
     missing: float,
     pred_leaf: bool,
@@ -1304,7 +1311,12 @@ def mapped_predict(
             )
         )
         return await _direct_predict_impl(
-            mapped_predict, _booster, data, None, _output_shape, meta
+            mapped_predict=mapped_predict,
+            booster=_booster,
+            data=data,
+            base_margin=None,
+            output_shape=_output_shape,
+            meta=meta,
         )

     output_shape, _ = await client.compute(
@@ -1392,10 +1404,12 @@ def dispatched_predict(booster: Booster, part: Dict[str, Any]) -> numpy.ndarray:
     return predictions


+@_deprecate_positional_args
 def predict( # pylint: disable=unused-argument
     client: Optional["distributed.Client"],
     model: Union[TrainReturnT, Booster, "distributed.Future"],
     data: Union[DaskDMatrix, _DataT],
+    *,
     output_margin: bool = False,
     missing: float = numpy.nan,
     pred_leaf: bool = False,


async def _inplace_predict_async( # pylint: disable=too-many-branches
*,
client: "distributed.Client",
global_config: Dict[str, Any],
model: Union[Booster, Dict, "distributed.Future"],
@@ -1501,14 +1516,21 @@ def mapped_predict(
         )
     )
     return await _direct_predict_impl(
-        mapped_predict, booster, data, base_margin, shape, meta
+        mapped_predict=mapped_predict,
+        booster=booster,
+        data=data,
+        base_margin=base_margin,
+        output_shape=shape,
+        meta=meta,
     )


+@_deprecate_positional_args
 def inplace_predict( # pylint: disable=unused-argument
     client: Optional["distributed.Client"],
     model: Union[TrainReturnT, Booster, "distributed.Future"],
     data: _DataT,
+    *,
     iteration_range: IterationRange = (0, 0),
     predict_type: str = "value",
     missing: float = numpy.nan,
@@ -1615,6 +1637,7 @@ class DaskScikitLearnBase(XGBModel):
     async def _predict_async(
         self,
         data: _DataT,
+        *,
         output_margin: bool,
         validate_features: bool,
         base_margin: Optional[_DaskCollection],
@@ -1652,9 +1675,11 @@ async def _predict_async(
         )
         return predts

+    @_deprecate_positional_args
     def predict(
         self,
         X: _DataT,
+        *,
         output_margin: bool = False,
         validate_features: bool = True,
         base_margin: Optional[_DaskCollection] = None,
@@ -1765,6 +1790,7 @@ async def _fit_async(
         self,
         X: _DataT,
         y: _DaskCollection,
+        *,
         sample_weight: Optional[_DaskCollection],
         base_margin: Optional[_DaskCollection],
         eval_set: Optional[Sequence[Tuple[_DaskCollection, _DaskCollection]]],
@@ -1855,6 +1881,7 @@ async def _fit_async(
         self,
         X: _DataT,
         y: _DaskCollection,
+        *,
         sample_weight: Optional[_DaskCollection],
         base_margin: Optional[_DaskCollection],
         eval_set: Optional[Sequence[Tuple[_DaskCollection, _DaskCollection]]],
@@ -1999,13 +2026,18 @@ def predict_proba(
     async def _predict_async(
         self,
         data: _DataT,
+        *,
         output_margin: bool,
         validate_features: bool,
         base_margin: Optional[_DaskCollection],
         iteration_range: Optional[IterationRange],
     ) -> _DaskCollection:
         pred_probs = await super()._predict_async(
-            data, output_margin, validate_features, base_margin, iteration_range
+            data,
+            output_margin=output_margin,
+            validate_features=validate_features,
+            base_margin=base_margin,
+            iteration_range=iteration_range,
         )
         if output_margin:
             return pred_probs
@@ -2049,6 +2081,7 @@ async def _fit_async(
         self,
         X: _DataT,
         y: _DaskCollection,
+        *,
         group: Optional[_DaskCollection],
         qid: Optional[_DaskCollection],
         sample_weight: Optional[_DaskCollection],
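The dask module gets the same treatment at both the functional interface (predict, inplace_predict, the _create_*_dmatrix helpers) and the scikit-learn wrappers. A sketch of the resulting calling convention on a throwaway local cluster, assuming dask, distributed, and a patched xgboost are installed:

from dask import array as da
from distributed import Client, LocalCluster

import xgboost.dask as dxgb

with LocalCluster(n_workers=2) as cluster, Client(cluster) as client:
    X = da.random.random((1000, 8), chunks=(250, 8))
    y = da.random.random(1000, chunks=250)
    dtrain = dxgb.DaskDMatrix(client, X, y)
    output = dxgb.train(client, {"tree_method": "hist"}, dtrain, num_boost_round=4)

    # `client`, `model`, and `data` stay positional; every other option
    # to predict() is keyword-only after this change.
    leaves = dxgb.predict(client, output, dtrain, pred_leaf=True)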