@lap v0.3
# Machine-readable API spec. Each @endpoint block is one API call.
@api Warehouse Connectors API
@version 1.0.0
@endpoints 10
@toc projects(10)

@endpoint GET /projects/{projectId}/warehouse-sources/imports
@desc List all warehouse imports
@returns(200) {status: any, results: [map]} # Success
@errors {401, 403}

@endpoint GET /projects/{projectId}/warehouse-sources/imports/{importId}
@desc Get a specific warehouse import
@returns(200) {status: any, results: any} # Success
@errors {401, 403, 404}

@endpoint PATCH /projects/{projectId}/warehouse-sources/imports/{importId}
@desc Update a warehouse import
@required {paused: bool # Whether to pause (true) or resume (false) the import, run_every: int(0/3600000000000/86400000000000/604800000000000) # Sync frequency in nanoseconds. Only these values are accepted: `0` (API-triggered only; use the manual-sync endpoint to trigger), `3600000000000` (hourly), `86400000000000` (daily), `604800000000000` (weekly)}
@optional {databricks_params: map{export_cluster_config: map}}
@returns(200) {status: any, results: any} # Success
@errors {400, 401, 403, 404}

@endpoint DELETE /projects/{projectId}/warehouse-sources/imports/{importId}
@desc Delete a warehouse import
@optional {delete_data: bool=false # Whether to also delete the imported data from Mixpanel}
@returns(200) {status: any} # Success
@errors {401, 403, 404}

@endpoint POST /projects/{projectId}/warehouse-sources/imports/event-stream
@desc Create an event stream import
@required {import_type: str, warehouse_source_id: int, table_params: map # Table location parameters (structure depends on warehouse type), time_column_name: str, sync_mode: str(time_based/mirror_mode/full_sync/one_time)}
@optional {event_name: str, event_column_name: str, user_column_name: str, company_column_name: str # Required for B2B projects. The column containing the company identifier, device_column_name: str, json_properties_column_name: str, run_every: int(0/3600000000000/86400000000000/604800000000000) # Sync frequency in nanoseconds. Only these values are accepted: `0` (API-triggered only; use the manual-sync endpoint to trigger), `3600000000000` (hourly), `86400000000000` (daily), `604800000000000` (weekly), insert_time_column_name: str, property_mappings: map, databricks_params: map{export_cluster_config: map}}
@returns(200) {status: any, results: any} # Success
@errors {400, 401, 403}

@endpoint POST /projects/{projectId}/warehouse-sources/imports/people
@desc Create a people (user profiles) import
@required {import_type: str, warehouse_source_id: int, table_params: map # Table location parameters (structure depends on warehouse type), user_column_name: str, sync_mode: str(time_based/mirror_mode/full_sync/one_time)}
@optional {json_properties_column_name: str, run_every: int(0/3600000000000/86400000000000/604800000000000) # Sync frequency in nanoseconds. Only these values are accepted: `0` (API-triggered only; use the manual-sync endpoint to trigger), `3600000000000` (hourly), `86400000000000` (daily), `604800000000000` (weekly), insert_time_column_name: str, property_mappings: map, databricks_params: map{export_cluster_config: map}}
@returns(200) {status: any, results: any} # Success
@errors {400, 401, 403}

@endpoint POST /projects/{projectId}/warehouse-sources/imports/groups
@desc Create a groups import
@required {import_type: str, warehouse_source_id: int, table_params: map # Table location parameters (structure depends on warehouse type), group_key: str, group_id_column: str, sync_mode: str(time_based/mirror_mode/full_sync/one_time)}
@optional {run_every: int(0/3600000000000/86400000000000/604800000000000) # Sync frequency in nanoseconds. Only these values are accepted: `0` (API-triggered only; use the manual-sync endpoint to trigger), `3600000000000` (hourly), `86400000000000` (daily), `604800000000000` (weekly), insert_time_column_name: str, json_properties_column_name: str, property_mappings: map, databricks_params: map{export_cluster_config: map}}
@returns(200) {status: any, results: any} # Success
@errors {400, 401, 403}

@endpoint POST /projects/{projectId}/warehouse-sources/imports/lookup-table
@desc Create a lookup table import
@required {import_type: str, warehouse_source_id: int, table_params: map # Table location parameters (structure depends on warehouse type), mixpanel_property: map{value!: str, resourceType!: str, customPropertyId: int} # The Mixpanel property that this lookup table will be joined on. This defines the "Join Key" — the Mixpanel event or user property whose values will be matched against the `property_key_column_name` column in your warehouse table.  For example, if your events have a property called `product_id` and your warehouse lookup table has a column `product_id` containing matching values along with enrichment columns like `product_name` and `category`, you would set: - `mixpanel_property.value` = `"product_id"` (the Mixpanel property name) - `property_key_column_name` = `"product_id"` (the warehouse column name), property_key_column_name: str # The name of the column in your warehouse table that contains the join key values. This column's values will be matched against the Mixpanel property specified in `mixpanel_property.value` to look up the corresponding row., sync_mode: str(full_sync/one_time)}
@optional {run_every: int(0/3600000000000/86400000000000/604800000000000) # Sync frequency in nanoseconds. Only these values are accepted: `0` (API-triggered only; use the manual-sync endpoint to trigger), `3600000000000` (hourly), `86400000000000` (daily), `604800000000000` (weekly), databricks_params: map{export_cluster_config: map}}
@returns(200) {status: any, results: any} # Success
@errors {400, 401, 403}

@endpoint PUT /projects/{projectId}/warehouse-sources/imports/{importId}/manual-sync
@desc Run an import
@returns(200) {status: any, results: map{run_id: str}} # Success
@errors {401, 403, 404}

@endpoint GET /projects/{projectId}/warehouse-sources/imports/{importId}/history
@desc Get import job history
@returns(200) {status: any, results: map{runs: [map]}} # Success
@errors {401, 403, 404}

@end
