tabsdata.DatabricksDestination

class DatabricksDestination(
host_url: str,
token: str | Secret,
tables: list[str] | str,
volume: str,
catalog: str | None = None,
schema: str | None = None,
warehouse: str | None = None,
warehouse_id: str | None = None,
if_table_exists: Literal['append', 'replace'] = 'append',
schema_strategy: Literal['update', 'strict'] = 'update',
**kwargs,
)

Bases: DestinationPlugin

__init__(
host_url: str,
token: str | Secret,
tables: list[str] | str,
volume: str,
catalog: str | None = None,
schema: str | None = None,
warehouse: str | None = None,
warehouse_id: str | None = None,
if_table_exists: Literal['append', 'replace'] = 'append',
schema_strategy: Literal['update', 'strict'] = 'update',
**kwargs,
)
Initializes the DatabricksDestination with the desired configuration for storing the data.

Args:

    host_url: The URL of the Databricks workspace to write to.
    token: The token used to authenticate with Databricks, provided either as a plain string or as a Secret.
    tables: The name(s) of the table(s) to store the data in.
    volume: The Databricks volume to use when uploading the data.
    catalog: The catalog that contains the destination tables, if any.
    schema: The schema that contains the destination tables, if any.
    warehouse: The name of the SQL warehouse to use, if any.
    warehouse_id: The ID of the SQL warehouse to use, if any.
    if_table_exists: The strategy to follow when the table already exists. Either 'append' (default) or 'replace'.
    schema_strategy: The strategy to follow when appending to a table with an existing schema. Either 'update' (default) or 'strict'.
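For orientation, a minimal construction sketch follows. Every value in it is illustrative, and it assumes tabsdata exposes an EnvironmentSecret helper for reading the token from an environment variable; per the signature above, a plain string token also works.

    import tabsdata as td

    # Minimal construction sketch; every value below is illustrative.
    # td.EnvironmentSecret is assumed to be available for reading the token
    # from an environment variable; a plain string token works as well.
    destination = td.DatabricksDestination(
        host_url="https://adb-1234567890123456.7.azuredatabricks.net",  # hypothetical workspace URL
        token=td.EnvironmentSecret("DATABRICKS_TOKEN"),
        tables=["sales", "customers"],  # hypothetical destination table names
        volume="landing",               # hypothetical volume
        catalog="main",                 # optional, hypothetical
        schema="analytics",             # optional, hypothetical
        if_table_exists="append",       # default strategy
        schema_strategy="update",       # default strategy
    )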

Methods

__init__(host_url, token, tables, volume[, ...])

Initializes the DatabricksDestination with the desired configuration for storing the data.

chunk(working_dir, *results)

Triggers the export of the data to local parquet chunks.

stream(working_dir, *results)

Triggers the export of the data. This method receives the resulting data to be exported.

write(files)

Writes the files to Databricks.
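These methods are lifecycle hooks inherited from DestinationPlugin: in normal use they are invoked by the tabsdata framework rather than called directly, once the destination is attached to a subscriber. A sketch of that wiring follows; the decorator arguments and all table names are assumptions for illustration.

    import tabsdata as td

    # Sketch of wiring the destination into a subscriber; the tabsdata
    # framework, not user code, then drives chunk()/stream()/write().
    # The table names and connection values are hypothetical.
    @td.subscriber(
        tables=["persons"],  # hypothetical input table
        destination=td.DatabricksDestination(
            host_url="https://example.cloud.databricks.com",  # illustrative
            token=td.EnvironmentSecret("DATABRICKS_TOKEN"),
            tables="persons_export",  # hypothetical output table
            volume="landing",         # hypothetical volume
        ),
    )
    def export_persons(persons: td.TableFrame) -> td.TableFrame:
        return persons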

Attributes

host_url

if_table_exists

The strategy to follow when the table already exists.

schema_strategy

The strategy to follow when appending to a table with an existing schema. Both strategies are illustrated in the sketch after this list.

tables

token
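For illustration, here is a sketch of a stricter configuration than the defaults: appending to existing tables, but with 'strict' schema handling. That 'strict' rejects a schema mismatch rather than updating the table schema is an inference from the attribute descriptions above, and all other values are illustrative.

    import tabsdata as td

    # Stricter variant (illustrative values): keep appending, but use
    # 'strict' so a schema mismatch is, presumably, rejected rather than
    # merged into the existing table schema.
    strict_destination = td.DatabricksDestination(
        host_url="https://example.cloud.databricks.com",  # illustrative
        token=td.EnvironmentSecret("DATABRICKS_TOKEN"),
        tables="events",    # hypothetical table
        volume="landing",   # hypothetical volume
        if_table_exists="append",
        schema_strategy="strict",
    )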