Skip to content

Commit 7a3a587

Browse files
committed
use simple import
1 parent 342d249 commit 7a3a587

17 files changed

Lines changed: 33 additions & 82 deletions

File tree

ibis/backends/__init__.py

Lines changed: 5 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -101,9 +101,7 @@ def _import_pyarrow():
101101
"Exporting to arrow formats requires `pyarrow` but it is not installed"
102102
)
103103
else:
104-
from ibis.util import apply_pyarrow_hotfix
105-
106-
apply_pyarrow_hotfix()
104+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
107105

108106
return pyarrow
109107

@@ -1609,11 +1607,10 @@ class PyArrowExampleLoader(ExampleLoader):
16091607
temporary_example: bool = True
16101608

16111609
def _load_parquet(self, *, path: str | Path, table_name: str) -> ir.Table:
1612-
from ibis.util import apply_pyarrow_hotfix
1613-
1614-
apply_pyarrow_hotfix()
16151610
import pyarrow.parquet as pq
16161611

1612+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
1613+
16171614
table = pq.read_table(path)
16181615
return self.create_table(
16191616
table_name,
@@ -1623,12 +1620,12 @@ def _load_parquet(self, *, path: str | Path, table_name: str) -> ir.Table:
16231620
)
16241621

16251622
def _load_csv(self, *, path: str | Path, table_name: str) -> ir.Table:
1626-
from ibis.util import apply_pyarrow_hotfix
16271623

1628-
apply_pyarrow_hotfix()
16291624
import pyarrow as pa
16301625
import pyarrow.csv
16311626

1627+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
1628+
16321629
# The convert options lets pyarrow treat empty strings as null for
16331630
# string columns, but not quoted empty strings.
16341631
table = pyarrow.csv.read_csv(

ibis/backends/athena/__init__.py

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -26,9 +26,7 @@
2626
from ibis.backends import CanCreateDatabase, NoExampleLoader, UrlFromPath
2727
from ibis.backends.sql import SQLBackend
2828
from ibis.backends.sql.compilers.base import AlterTable, RenameTable
29-
from ibis.util import apply_pyarrow_hotfix
30-
31-
apply_pyarrow_hotfix()
29+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
3230

3331
if TYPE_CHECKING:
3432
from collections.abc import Callable, Mapping

ibis/backends/clickhouse/__init__.py

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -34,9 +34,7 @@
3434
from ibis.backends.clickhouse.converter import ClickHousePandasData
3535
from ibis.backends.sql import SQLBackend
3636
from ibis.backends.sql.compilers.base import C
37-
from ibis.util import apply_pyarrow_hotfix
38-
39-
apply_pyarrow_hotfix()
37+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
4038

4139
if TYPE_CHECKING:
4240
from collections.abc import Iterable, Iterator, Mapping

ibis/backends/databricks/__init__.py

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -29,9 +29,7 @@
2929
from ibis.backends.sql import SQLBackend
3030
from ibis.backends.sql.compilers.base import STAR, AlterTable, RenameTable
3131
from ibis.backends.sql.datatypes import DatabricksType
32-
from ibis.util import apply_pyarrow_hotfix
33-
34-
apply_pyarrow_hotfix()
32+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
3533

3634
if TYPE_CHECKING:
3735
from collections.abc import Callable, Iterable, Mapping

ibis/backends/datafusion/__init__.py

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -31,19 +31,17 @@
3131
)
3232
from ibis.backends.sql import SQLBackend
3333
from ibis.backends.sql.compilers.base import C
34+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
3435
from ibis.common.dispatch import lazy_singledispatch
3536
from ibis.expr.operations.udf import InputType
3637
from ibis.formats.pyarrow import PyArrowSchema, PyArrowType
3738
from ibis.util import (
38-
apply_pyarrow_hotfix,
3939
gen_name,
4040
normalize_filename,
4141
normalize_filenames,
4242
warn_deprecated,
4343
)
4444

45-
apply_pyarrow_hotfix()
46-
4745
try:
4846
from datafusion import ExecutionContext as SessionContext
4947
except ImportError:

ibis/backends/datafusion/udfs.py

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -8,9 +8,7 @@
88

99
import ibis.common.exceptions as com
1010
import ibis.expr.datatypes as dt
11-
from ibis.util import apply_pyarrow_hotfix
12-
13-
apply_pyarrow_hotfix()
11+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
1412

1513

1614
def _extract_epoch_seconds(array) -> dt.int32:

ibis/backends/duckdb/__init__.py

Lines changed: 1 addition & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -33,11 +33,9 @@
3333
)
3434
from ibis.backends.sql import SQLBackend
3535
from ibis.backends.sql.compilers.base import STAR, AlterTable, C, RenameTable
36+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
3637
from ibis.common.dispatch import lazy_singledispatch
3738
from ibis.expr.operations.udf import InputType
38-
from ibis.util import apply_pyarrow_hotfix
39-
40-
apply_pyarrow_hotfix()
4139

4240
if TYPE_CHECKING:
4341
from collections.abc import Iterable, Mapping, MutableMapping, Sequence
@@ -1383,10 +1381,6 @@ def to_pyarrow_batches(
13831381
"""
13841382
import pyarrow as pa
13851383

1386-
from ibis.util import apply_pyarrow_hotfix
1387-
1388-
apply_pyarrow_hotfix()
1389-
13901384
self._run_pre_execute_hooks(expr)
13911385
table = expr.as_table()
13921386
sql = self.compile(table, limit=limit, params=params)
@@ -1428,10 +1422,6 @@ def execute(
14281422
import pandas as pd
14291423
import pyarrow.types as pat
14301424

1431-
from ibis.util import apply_pyarrow_hotfix
1432-
1433-
apply_pyarrow_hotfix()
1434-
14351425
from ibis.backends.duckdb.converter import DuckDBPandasData
14361426

14371427
rel = self._to_duckdb_relation(expr, params=params, limit=limit, **kwargs)

ibis/backends/flink/__init__.py

Lines changed: 5 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -494,11 +494,8 @@ def create_table(
494494
import pandas as pd
495495
import pyarrow as pa
496496

497-
from ibis.util import apply_pyarrow_hotfix
498-
499-
apply_pyarrow_hotfix()
500-
501497
import ibis.expr.types as ir
498+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
502499

503500
if obj is None and schema is None:
504501
raise exc.IbisError("`schema` or `obj` is required")
@@ -941,9 +938,7 @@ def insert(
941938
import pandas as pd
942939
import pyarrow as pa
943940

944-
from ibis.util import apply_pyarrow_hotfix
945-
946-
apply_pyarrow_hotfix()
941+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
947942

948943
if isinstance(obj, ir.Table):
949944
statement = InsertSelect(
@@ -989,9 +984,7 @@ def to_pyarrow(
989984
) -> pa.Table:
990985
import pyarrow as pa
991986

992-
from ibis.util import apply_pyarrow_hotfix
993-
994-
apply_pyarrow_hotfix()
987+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
995988

996989
pyarrow_batches = iter(
997990
self.to_pyarrow_batches(expr, params=params, limit=limit, **kwargs)
@@ -1019,9 +1012,7 @@ def to_pyarrow_batches(
10191012
):
10201013
import pyarrow as pa
10211014

1022-
from ibis.util import apply_pyarrow_hotfix
1023-
1024-
apply_pyarrow_hotfix()
1015+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
10251016

10261017
ibis_table = expr.as_table()
10271018

@@ -1071,15 +1062,12 @@ def _from_pyflink_table_to_pyarrow_batches(
10711062
chunk_size: int | None = None,
10721063
):
10731064
import pyarrow as pa
1074-
1075-
from ibis.util import apply_pyarrow_hotfix
1076-
1077-
apply_pyarrow_hotfix()
10781065
from pyflink.java_gateway import get_gateway
10791066
from pyflink.table.serializers import ArrowSerializer
10801067
from pyflink.table.types import create_arrow_schema
10811068

10821069
from ibis.backends.flink.datatypes import get_field_data_types
1070+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
10831071
# Note (mehmet): Implementation of this is based on
10841072
# pyflink/table/table.py: to_pandas().
10851073

ibis/backends/impala/__init__.py

Lines changed: 1 addition & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1335,10 +1335,7 @@ def to_pyarrow(
13351335
) -> pa.Table:
13361336
import pyarrow as pa
13371337

1338-
from ibis.util import apply_pyarrow_hotfix
1339-
1340-
apply_pyarrow_hotfix()
1341-
1338+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
13421339
from ibis.formats.pyarrow import PyArrowData
13431340

13441341
self._run_pre_execute_hooks(expr)

ibis/backends/pyspark/__init__.py

Lines changed: 1 addition & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1043,10 +1043,7 @@ def to_pyarrow(
10431043
)
10441044
import pyarrow as pa
10451045

1046-
from ibis.util import apply_pyarrow_hotfix
1047-
1048-
apply_pyarrow_hotfix()
1049-
1046+
from ibis.common import import_to_try_pyarrow_hotfix # noqa: F401
10501047
from ibis.formats.pyarrow import PyArrowData
10511048

10521049
table_expr = expr.as_table()

0 commit comments

Comments
 (0)