Hello colleagues, another question while migrating from 0.21.1 to 0.31.0. So far I have only reproduced this for pandas outputs. I have pyarrow in my dependencies, yet I still get the following exception (we have a custom artifact store implementation for Alluxio):
```
╭───────────────────── Traceback (most recent call last) ──────────────────────╮
│ /tmp/pyarrow/_parquet.pyx:1714 in pyarrow._parquet.ParquetWriter.__cinit__ │
│ │
│ [Errno 2] No such file or directory: '/tmp/pyarrow/_parquet.pyx' │
│ │
│ /usr/local/lib/python3.8/site-packages/pyarrow/util.py:93 in _stringify_path │
│ │
│ 90 │ except AttributeError: │
│ 91 │ │ pass │
│ 92 │ │
│ ❱ 93 │ raise TypeError("not a path-like object") │
│ 96 def product(seq): │
╰──────────────────────────────────────────────────────────────────────────────╯
TypeError: not a path-like object
During handling of the above exception, another exception occurred:
│ /usr/local/lib/python3.8/site-packages/zenml/orchestrators/step_launcher.py: │
│ 380 in _run_step_without_step_operator │
│ │
│ 377 │ │ │ output_artifact_uris: The output artifact URIs of the curr │
│ 378 │ │ """ │
│ 379 │ │ runner = StepRunner(step=self._step, stack=self._stack) │
│ ❱ 380 │ │ runner.run( │
│ 381 │ │ │ input_artifacts=input_artifacts, │
│ 382 │ │ │ output_artifact_uris=output_artifact_uris, │
│ 383 │ │ │ step_run_info=step_run_info, │
│ │
│ /usr/local/lib/python3.8/site-packages/zenml/orchestrators/step_runner.py:12 │
│ 2 in run │
│ │
│ 119 │ │ # Store and publish the output artifacts of the step function. │
│ 120 │ │ output_annotations = parse_return_type_annotations(spec.annota │
│ 121 │ │ output_data = self._validate_outputs(return_values, output_ann │
│ ❱ 122 │ │ output_artifacts = self._store_output_artifacts( │
│ 123 │ │ │ output_data=output_data, │
│ 124 │ │ │ output_artifact_uris=output_artifact_uris, │
│ 125 │ │ │ output_materializers=output_materializers, │
│ │
│ /usr/local/lib/python3.8/site-packages/zenml/orchestrators/step_runner.py:37 │
│ 4 in _store_output_artifacts │
│ │
│ 371 │ │ │ │ artifact_store_id=artifact_store_id, │
│ 372 │ │ │ ) │
│ 373 │ │ │ output_artifacts[output_name] = output_artifact │
│ ❱ 374 │ │ │ materializer_class(uri).save(return_value) │
│ 375 │ │ return output_artifacts │
│ 376 │
│ │
│ /usr/local/lib/python3.8/site-packages/zenml/materializers/pandas_materializ │
│ er.py:130 in save │
│ │
│ 127 │ │ │
│ 128 │ │ if self.pyarrow_exists: │
│ 129 │ │ │ with fileio.open(self.parquet_path, mode="wb") as f: │
│ ❱ 130 │ │ │ │ df.to_parquet(f, compression=COMPRESSION_TYPE) │
│ 131 │ │ else: │
│ 132 │ │ │ with fileio.open(self.csv_path, mode="wb") as f: │
│ 133 │ │ │ │ df.to_csv(f, index=True) │
│ │
│ /usr/local/lib/python3.8/site-packages/pandas/util/_decorators.py:211 in │
│ wrapper │
│ │
│ 208 │ │ │ │ │ raise TypeError(msg) │
│ 209 │ │ │ │ else: │
│ 210 │ │ │ │ │ kwargs[new_arg_name] = new_arg_value │
│ ❱ 211 │ │ │ return func(*args, **kwargs) │
│ 212 │ │ │
│ 213 │ │ return cast(F, wrapper) │
│ 214 │
│ │
│ /usr/local/lib/python3.8/site-packages/pandas/core/frame.py:2975 in │
│ to_parquet │
│ │
│ 2972 │ │ """ │
│ 2973 │ │ from pandas.io.parquet import to_parquet │
│ 2974 │ │ │
│ ❱ 2975 │ │ return to_parquet( │
│ 2976 │ │ │ self, │
│ 2977 │ │ │ path, │
│ 2978 │ │ │ engine, │
│ │
│ /usr/local/lib/python3.8/site-packages/pandas/io/parquet.py:430 in │
│ to_parquet │
│ │
│ 427 │ │
│ 428 │ path_or_buf: FilePath | WriteBuffer[bytes] = io.BytesIO() if path │
│ 429 │ │
│ ❱ 430 │ impl.write( │
│ 431 │ │ df, │
│ 432 │ │ path_or_buf, │
│ 433 │ │ compression=compression, │
│ │
│ /usr/local/lib/python3.8/site-packages/pandas/io/parquet.py:204 in write │
│ │
│ 201 │ │ │ │ ) │
│ 202 │ │ │ else: │
│ 203 │ │ │ │ # write to single output file │
│ ❱ 204 │ │ │ │ self.api.parquet.write_table( │
│ 205 │ │ │ │ │ table, path_or_handle, compression=compression, ** │
│ 206 │ │ │ │ ) │
│ 207 │ │ finally: │
│ │
│ /usr/local/lib/python3.8/site-packages/pyarrow/parquet/core.py:2964 in │
│ write_table │
│ │
│ 2961 │ row_group_size = kwargs.pop('chunk_size', row_group_size) │
│ 2962 │ use_int96 = use_deprecated_int96_timestamps │
│ 2963 │ try: │
│ ❱ 2964 │ │ with ParquetWriter( │
│ 2965 │ │ │ │ where, table.schema, │
│ 2966 │ │ │ │ filesystem=filesystem, │
│ 2967 │ │ │ │ version=version, │
│ │
│ /usr/local/lib/python3.8/site-packages/pyarrow/parquet/core.py:966 in │
│ __init__ │
│ │
│ 963 │ │ │ sink = where │
│ 964 │ │ self._metadata_collector = options.pop('metadata_collector', │
│ 965 │ │ engine_version = 'V2' │
│ ❱ 966 │ │ self.writer = _parquet.ParquetWriter( │
│ 967 │ │ │ sink, schema, │
│ 968 │ │ │ version=version, │
│ 969 │ │ │ compression=compression, │
│ │
│ /tmp/pyarrow/_parquet.pyx:1716 in pyarrow._parquet.ParquetWriter.__cinit__ │
│ │
│ [Errno 2] No such file or directory: '/tmp/pyarrow/_parquet.pyx' │
│ │
│ /tmp/pyarrow/io.pxi:1807 in pyarrow.lib.get_writer │
│ │
│ [Errno 2] No such file or directory: '/tmp/pyarrow/io.pxi' │
│ │
│ /tmp/pyarrow/io.pxi:214 in pyarrow.lib.NativeFile.get_output_stream │
│ │
│ [Errno 2] No such file or directory: '/tmp/pyarrow/io.pxi' │
│ │
│ /tmp/pyarrow/io.pxi:228 in pyarrow.lib.NativeFile._assert_writable │
│ │
│ [Errno 2] No such file or directory: '/tmp/pyarrow/io.pxi' │
│ │
│ /tmp/pyarrow/io.pxi:219 in pyarrow.lib.NativeFile._assert_open │
│ │
│ [Errno 2] No such file or directory: '/tmp/pyarrow/io.pxi' │
╰──────────────────────────────────────────────────────────────────────────────╯
ValueError: I/O operation on closed file
Runtime execution graph. Only steps that are currently running or have already completed are shown.
```
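For context, this is roughly what the materializer does according to the traceback (pandas_materializer.py opens the parquet path via fileio and hands the file object to df.to_parquet). Below is a minimal sketch that I assume would reproduce the issue outside a pipeline; the alluxio URI and the gzip compression value are placeholders, not the exact values from our setup:

```python
import pandas as pd

from zenml.io import fileio

# Hypothetical Alluxio-backed URI, for illustration only -- any path routed
# through the custom artifact store should exercise the same code path.
uri = "alluxio://bucket/tmp/data.parquet"

df = pd.DataFrame({"a": [1, 2, 3]})

# Mirrors pandas_materializer.py:128-130 from the traceback: open the remote
# file for writing and pass the file object (not a path) to pandas/pyarrow.
with fileio.open(uri, mode="wb") as f:
    df.to_parquet(f, compression="gzip")  # compression value is a guess
```

If to_parquet fails the same way against a plain fileio handle, that would narrow it down to how the file object returned by our Alluxio filesystem interacts with pyarrow's ParquetWriter, rather than anything in the step/runner code.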