chore(python): Bump mypy from 1.11.1 to 1.13.0 in /py-polars (#19569)
Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Stijn de Gooijer <[email protected]>
dependabot[bot] and stinodego authored Nov 1, 2024
1 parent 1d5c640 commit 676f052
Showing 7 changed files with 20 additions and 16 deletions.
12 changes: 6 additions & 6 deletions py-polars/polars/dataframe/frame.py
@@ -3990,13 +3990,13 @@ def unpack_table_name(name: str) -> tuple[str | None, str | None, str]:
else (connection, False)
)
with (
-conn if can_close_conn else contextlib.nullcontext(),
-conn.cursor() as cursor,
+conn if can_close_conn else contextlib.nullcontext(), # type: ignore[union-attr]
+conn.cursor() as cursor, # type: ignore[union-attr]
):
catalog, db_schema, unpacked_table_name = unpack_table_name(table_name)
n_rows: int
if adbc_version >= (0, 7):
-if "sqlite" in conn.adbc_get_info()["driver_name"].lower():
+if "sqlite" in conn.adbc_get_info()["driver_name"].lower(): # type: ignore[union-attr]
if if_table_exists == "replace":
# note: adbc doesn't (yet) support 'replace' for sqlite
cursor.execute(f"DROP TABLE IF EXISTS {table_name}")
@@ -4026,7 +4026,7 @@ def unpack_table_name(name: str) -> tuple[str | None, str | None, str]:
mode=mode,
**(engine_options or {}),
)
-conn.commit()
+conn.commit() # type: ignore[union-attr]
return n_rows

elif engine == "sqlalchemy":
@@ -4853,7 +4853,7 @@ def glimpse(
def _parse_column(col_name: str, dtype: PolarsDataType) -> tuple[str, str, str]:
fn = repr if schema[col_name] == String else str
values = self[:max_n_values, col_name].to_list()
-val_str = ", ".join(fn(v) for v in values) # type: ignore[operator]
+val_str = ", ".join(fn(v) for v in values)
if len(col_name) > max_colname_length:
col_name = col_name[: (max_colname_length - 1)] + "…"
return col_name, f"<{_dtype_str_repr(dtype)}>", val_str
@@ -9902,7 +9902,7 @@ def n_unique(self, subset: str | Expr | Sequence[str | Expr] | None = None) -> i
expr = wrap_expr(parse_into_expression(subset[0]))
else:
struct_fields = F.all() if (subset is None) else subset
-expr = F.struct(struct_fields) # type: ignore[call-overload]
+expr = F.struct(struct_fields)

df = self.lazy().select(expr.n_unique()).collect(_eager=True)
return 0 if df.is_empty() else df.row(0)[0]
2 changes: 1 addition & 1 deletion py-polars/polars/datatypes/convert.py
@@ -72,7 +72,7 @@ def is_polars_dtype(
) -> TypeGuard[PolarsDataType]:
"""Indicate whether the given input is a Polars dtype, or dtype specialization."""
check_classes = DataType if require_instantiated else (DataType, DataTypeClass)
-is_dtype = isinstance(dtype, check_classes) # type: ignore[arg-type]
+is_dtype = isinstance(dtype, check_classes)

if not include_unknown:
return is_dtype and dtype != Unknown
2 changes: 1 addition & 1 deletion py-polars/polars/expr/expr.py
@@ -310,7 +310,7 @@ def __array_ufunc__(
root_expr = F.struct(actual_exprs)

def function(s: Series) -> Series: # pragma: no cover
-args = []
+args: list[Any] = []
for i, expr in enumerate(exprs):
if expr[1] and num_expr > 1:
args.append(s.struct[i])
8 changes: 5 additions & 3 deletions py-polars/polars/functions/lazy.py
@@ -1867,9 +1867,11 @@ def collect_all_async(
)
prepared.append(ldf)

-result = _GeventDataFrameResult() if gevent else _AioDataFrameResult()
-plr.collect_all_with_callback(prepared, result._callback_all) # type: ignore[attr-defined]
-return result # type: ignore[return-value]
+result: (
+    _GeventDataFrameResult[list[DataFrame]] | _AioDataFrameResult[list[DataFrame]]
+) = _GeventDataFrameResult() if gevent else _AioDataFrameResult()
+plr.collect_all_with_callback(prepared, result._callback_all)
+return result


def select(*exprs: IntoExpr | Iterable[IntoExpr], **named_exprs: IntoExpr) -> DataFrame:
2 changes: 1 addition & 1 deletion py-polars/polars/io/database/_executor.py
@@ -422,7 +422,7 @@ async def _sqlalchemy_async_execute(self, query: TextClause, **options: Any) ->
"""Execute a query using an async SQLAlchemy connection."""
is_session = self._is_alchemy_session(self.cursor)
cursor = self.cursor.begin() if is_session else self.cursor # type: ignore[attr-defined]
-async with cursor as conn:
+async with cursor as conn: # type: ignore[union-attr]
if is_session and not hasattr(conn, "execute"):
conn = conn.session
result = await conn.execute(query, **options)
8 changes: 5 additions & 3 deletions py-polars/polars/lazyframe/frame.py
@@ -2229,9 +2229,11 @@ def collect_async(
new_streaming=False,
)

-result = _GeventDataFrameResult() if gevent else _AioDataFrameResult()
-ldf.collect_with_callback(result._callback) # type: ignore[attr-defined]
-return result # type: ignore[return-value]
+result: _GeventDataFrameResult[DataFrame] | _AioDataFrameResult[DataFrame] = (
+    _GeventDataFrameResult() if gevent else _AioDataFrameResult()
+)
+ldf.collect_with_callback(result._callback)
+return result

def collect_schema(self) -> Schema:
"""
2 changes: 1 addition & 1 deletion py-polars/requirements-lint.txt
@@ -1,3 +1,3 @@
-mypy==1.11.1
+mypy[faster-cache]==1.13.0
ruff==0.7.1
typos==1.26.8

