Examples & Recipes
Tested commands and real-world examples for common LakeXpress scenarios. Linux syntax is shown; on Windows PowerShell, replace the \ line-continuation character with ` and ./LakeXpress with .\LakeXpress.exe.
Table of Contents
- Database Lifecycle Commands
- Configuration Management
- Local Exports
- AWS S3 Exports
- Azure Storage Exports
- Google Cloud Storage Exports
- OneLake Exports
- Snowflake Publishing
- Databricks Publishing
- AWS Glue Publishing
- Microsoft Fabric Publishing
- Table Filtering
- Incremental Sync
- Resume Failed Exports
Database Lifecycle Commands
# Initialize the LakeXpress DB schema
./LakeXpress lxdb init -a credentials.json --lxdb_auth_id lxdb_ms
# Drop the LakeXpress DB schema (requires --confirm)
./LakeXpress lxdb drop -a credentials.json --lxdb_auth_id lxdb_ms --confirm
# Clear all data, keep the schema
./LakeXpress lxdb truncate -a credentials.json --lxdb_auth_id lxdb_ms
# Show locked tables (incremental syncs only)
./LakeXpress lxdb locks -a credentials.json --lxdb_auth_id lxdb_ms
# Release stale or stuck locks
./LakeXpress lxdb release-locks -a credentials.json --lxdb_auth_id lxdb_ms --confirm
Different LakeXpress DB Types
# SQL Server as LakeXpress DB
./LakeXpress lxdb init -a credentials.json --lxdb_auth_id lxdb_ms_02
# PostgreSQL as LakeXpress DB
./LakeXpress lxdb init -a credentials.json --lxdb_auth_id lxdb_pg
# DuckDB as LakeXpress DB
./LakeXpress lxdb init -a credentials.json --lxdb_auth_id lxdb_duckdb
# MySQL as LakeXpress DB
./LakeXpress lxdb init -a credentials.json --lxdb_auth_id mysql_log_01
# SQLite as LakeXpress DB
./LakeXpress lxdb init -a credentials.json --lxdb_auth_id sqlite_01
Configuration Management
# Create a new sync configuration
./LakeXpress config create [OPTIONS]
# List all sync configurations
./LakeXpress config list -a credentials.json --lxdb_auth_id lxdb_ms
# Delete a sync configuration
./LakeXpress config delete -a credentials.json --lxdb_auth_id lxdb_ms --sync_id <SYNC_ID>
# Run from a legacy YAML config file
./LakeXpress run -c config_20251222.yml
Local Exports
PostgreSQL Source
PostgreSQL to Local (SQL Server LakeXpress DB)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--output_dir /tmp/tpch/ \
--generate_metadata
./LakeXpress sync
SQL Server Source
SQL Server to Local (PostgreSQL LakeXpress DB)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_pg \
--source_db_auth_id ds_10_ms \
--source_db_name adventureworksdw \
--source_schema_name dbo \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--output_dir /tmp/tpch/ \
--generate_metadata
./LakeXpress sync
SQL Server to Local (SQLite LakeXpress DB)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id sqlite_01 \
--source_db_auth_id ds_10_ms \
--source_db_name adventureworksdw \
--source_schema_name dbo \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--output_dir /tmp/tpch/ \
--generate_metadata
./LakeXpress sync
SQL Server to Local (DuckDB LakeXpress DB)
./LakeXpress lxdb init -a credentials.json --lxdb_auth_id lxdb_duckdb
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_duckdb \
--source_db_auth_id ds_10_ms \
--source_db_name adventureworksdw \
--source_schema_name dbo \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--output_dir /tmp/tpch/ \
--generate_metadata
./LakeXpress sync
SQL Server to Local (MySQL LakeXpress DB)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id mysql_log_01 \
--source_db_auth_id ds_10_ms \
--source_db_name adventureworksdw \
--source_schema_name dbo \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--output_dir /tmp/tpch/ \
--generate_metadata
./LakeXpress sync
MySQL Source
MySQL to Local (SQL Server LakeXpress DB)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id mysql_01 \
--source_db_name tpch \
--source_schema_name tpch \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--output_dir /tmp/tpch/ \
--generate_metadata
./LakeXpress sync
MariaDB Source
MariaDB to Local (SQL Server LakeXpress DB)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id maria_01 \
--source_db_name tpch_mariadb \
--source_schema_name tpch_mariadb \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--output_dir /tmp/tpch/ \
--generate_metadata
./LakeXpress sync
AWS S3 Exports
PostgreSQL to S3
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--generate_metadata
./LakeXpress sync
PostgreSQL to S3 (OVH)
Uses OVH S3-compatible storage with custom endpoint via AWS profile:
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id s3_02 \
--generate_metadata
./LakeXpress sync
credentials.json:
{
"s3_02": {
"ds_type": "s3",
"auth_mode": "profile",
"info": {
"directory": "s3://my-ovh-bucket/exports",
"profile": "ovh"
}
}
}
~/.aws/config:
[profile ovh]
endpoint_url = https://s3.gra.io.cloud.ovh.net
region = gra
~/.aws/credentials:
[ovh]
aws_access_key_id = YOUR_OVH_ACCESS_KEY
aws_secret_access_key = YOUR_OVH_SECRET_KEY
PostgreSQL to S3 with Sub-Path and Table Filter
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name "tpch_1%" \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--sub_path staging/data \
--include "nation%" \
--generate_metadata
./LakeXpress sync
MySQL to S3
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id mysql_01 \
--source_db_name tpch \
--source_schema_name tpch \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--sub_path mysql_export \
--include "nation%" \
--generate_metadata
./LakeXpress sync
MariaDB to S3
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id maria_01 \
--source_db_name tpch_mariadb \
--source_schema_name tpch_mariadb \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--sub_path mariadb_export \
--include "nation%" \
--generate_metadata
./LakeXpress sync
Azure Storage Exports
PostgreSQL to Azure
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id azure_01 \
--generate_metadata
./LakeXpress sync
SQL Server to Azure
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_pg \
--source_db_auth_id ds_11_ms \
--source_db_name adventureworks2019 \
--source_schema_name sales \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id azure_01 \
--sub_path adventureworks/sales \
--generate_metadata
./LakeXpress sync
credentials.json (Azure with Service Principal):
{
"azure_01": {
"ds_type": "azure",
"auth_mode": "service_principal",
"info": {
"directory": "abfss://datalake.dfs.core.windows.net/exports",
"azure_client_id": "your-application-client-id",
"azure_tenant_id": "your-directory-tenant-id",
"azure_client_secret": "$env{LX_AZURE_CLIENT_SECRET}"
}
},
"lxdb_pg": {
"ds_type": "postgres",
"auth_mode": "classic",
"info": {
"username": "$env{LX_LXDB_USER}",
"password": "$env{LX_LXDB_PASSWORD}",
"server": "localhost",
"port": 5432,
"database": "lakexpress_log"
}
},
"ds_11_ms": {
"ds_type": "mssql",
"auth_mode": "classic",
"info": {
"username": "$env{LX_MSSQL_USER}",
"password": "$env{LX_MSSQL_PASSWORD}",
"server": "localhost",
"port": 1433,
"database": "adventureworks2019"
}
}
}
Google Cloud Storage Exports
PostgreSQL to GCS
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id gcs_01 \
--generate_metadata
./LakeXpress sync
OneLake Exports
PostgreSQL to OneLake
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id fabric_onelake \
--generate_metadata
./LakeXpress sync
Snowflake Publishing
Export to S3, then create Snowflake tables.
Snowflake External Tables
Data stays in S3; queryable via Snowflake.
External Tables with Views (Default)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name "tpch_1%" \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--generate_metadata \
--sub_path snowflake_ext \
--include "nation%" \
--publish_target snowflake_pat
./LakeXpress sync
External Tables without Views
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name "tpch_1%" \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--generate_metadata \
--sub_path snowflake_ext \
--include "nation%" \
--publish_target snowflake_pat \
--no_views
./LakeXpress sync
Snowflake Internal Tables
Data is loaded into Snowflake-managed storage.
Basic Internal Tables
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name "tpch_1%" \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--generate_metadata \
--sub_path snowflake_int \
--include "nation%" \
--publish_method internal \
--publish_schema_pattern "INT_{schema}" \
--publish_target snowflake_pat
./LakeXpress sync
Internal Tables with Custom Naming
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--generate_metadata \
--sub_path production \
--publish_method internal \
--publish_schema_pattern "INT_{subpath}_{date}" \
--publish_table_pattern "{schema}_{table}" \
--publish_target snowflake_pat
./LakeXpress sync
Internal Tables with Primary Key Constraints
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--generate_metadata \
--sub_path production \
--publish_method internal \
--publish_schema_pattern "INT_{subpath}_{date}" \
--publish_table_pattern "{schema}_{table}" \
--publish_target snowflake_pat \
--snowflake_pk_constraints
./LakeXpress sync
SQL Server to Snowflake Internal
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_10_ms \
--source_db_name adventureworksdw \
--source_schema_name dbo \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--publish_method internal \
--publish_schema_pattern "adventureworksdw" \
--publish_table_pattern "{table}" \
--publish_target snowflake_pat
./LakeXpress sync
MySQL to Snowflake (External)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id mysql_01 \
--source_db_name tpch \
--source_schema_name tpch \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--generate_metadata \
--sub_path mysql_data \
--include "nation%" \
--publish_target snowflake_pat
./LakeXpress sync
MySQL to Snowflake (Internal)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id mysql_01 \
--source_db_name tpch \
--source_schema_name tpch \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--generate_metadata \
--sub_path mysql_data \
--include "nation%" \
--publish_method internal \
--publish_schema_pattern "INT_{schema}" \
--publish_target snowflake_pat
./LakeXpress sync
MariaDB to Snowflake (External)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id maria_01 \
--source_db_name tpch_mariadb \
--source_schema_name tpch_mariadb \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--generate_metadata \
--sub_path mariadb_data \
--include "nation%" \
--publish_target snowflake_pat
./LakeXpress sync
MariaDB to Snowflake (Internal)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id maria_01 \
--source_db_name tpch_mariadb \
--source_schema_name tpch_mariadb \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_01 \
--generate_metadata \
--sub_path mariadb_data \
--include "nation%" \
--publish_method internal \
--publish_schema_pattern "INT_{schema}" \
--publish_target snowflake_pat
./LakeXpress sync
Databricks Publishing
Export to S3, then create Databricks Unity Catalog tables.
PostgreSQL to Databricks (External)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_02 \
--publish_target databricks_01 \
--publish_method external
./LakeXpress sync
PostgreSQL to Databricks (Internal/Managed Delta Tables)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_02 \
--publish_target databricks_01 \
--publish_method internal
./LakeXpress sync
SQL Server to Databricks (Internal)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_10_ms \
--source_db_name adventureworksdw \
--source_schema_name dbo \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_02 \
--publish_target databricks_01 \
--publish_schema_pattern "adventureworksdw" \
--publish_method internal
./LakeXpress sync
AWS Glue Publishing
Export to S3, then register tables in AWS Glue Data Catalog.
PostgreSQL to AWS Glue
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--target_storage_id aws_s3_01 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 4 \
--n_jobs 2 \
--publish_target glue_01 \
--publish_schema_pattern "lakexpress_{schema}" \
--publish_table_pattern "{table}"
./LakeXpress sync
PostgreSQL to AWS Glue with Custom FastBCP Config
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--target_storage_id aws_s3_01 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--fastbcp_table_config "lineitem:DataDriven:TO_CHAR(l_shipdate, 'YYYY'):8;orders:Ctid::4" \
--n_jobs 2 \
--publish_target glue_01 \
--publish_schema_pattern "lakexpress_{schema}" \
--publish_table_pattern "{table}"
./LakeXpress sync
SQL Server to AWS Glue
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_09_ms \
--source_db_name tpch \
--source_schema_name tpch_1 \
--target_storage_id aws_s3_01 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 4 \
--n_jobs 2 \
--publish_target glue_01 \
--publish_schema_pattern "lakexpress_{schema}" \
--publish_table_pattern "{table}"
./LakeXpress sync
SQL Server to AWS Glue with Custom FastBCP Config
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_09_ms \
--source_db_name tpch \
--source_schema_name tpch_1 \
--target_storage_id aws_s3_01 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--fastbcp_table_config "lineitem:DataDriven:YEAR(l_shipdate):8;orders:Physloc::4" \
--n_jobs 2 \
--publish_target glue_01 \
--publish_database_name "lakexpress_tpch" \
--publish_table_pattern "{schema}_{table}"
./LakeXpress sync
SQL Server AdventureWorks to AWS Glue
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_10_ms \
--source_db_name adventureworksdw \
--source_schema_name dbo \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--fastbcp_p 2 \
--n_jobs 4 \
--target_storage_id aws_s3_02 \
--publish_target glue_01 \
--publish_schema_pattern "adventureworksdw"
./LakeXpress sync
Microsoft Fabric Publishing
Export to OneLake, then create Fabric Lakehouse tables.
PostgreSQL to Fabric (Internal/Delta Tables)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--target_storage_id fabric_onelake \
--publish_target fabric_01 \
--publish_method internal \
--fastbcp_p 2 \
--n_jobs 4 \
--generate_metadata
./LakeXpress sync
PostgreSQL to Fabric (External/SQL Views)
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1 \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--target_storage_id fabric_onelake \
--publish_target fabric_01 \
--publish_method external \
--publish_table_pattern "VW_{schema}_{table}" \
--fastbcp_p 2 \
--n_jobs 4 \
--generate_metadata
./LakeXpress sync
Table Filtering
Include Specific Table Patterns
Export only fact and dimension tables:
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_postgres \
--source_db_auth_id source_postgres \
--source_schema_name public \
--include "fact_%, dim_%" \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--output_dir ./exports \
--n_jobs 4
./LakeXpress sync
Includes fact_sales, fact_orders, dim_customer, dim_product, etc.
Exclude Temporary and Test Tables
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_postgres \
--source_db_auth_id source_postgres \
--source_schema_name public \
--exclude "temp_%, test_%, staging_%" \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--output_dir ./exports \
--n_jobs 4
./LakeXpress sync
Combine Include and Exclude
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_postgres \
--source_db_auth_id source_postgres \
--source_schema_name analytics \
--include "report_%, dashboard_%" \
--exclude "%_test, %_backup" \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--output_dir ./exports \
--n_jobs 4
./LakeXpress sync
Includes report_sales, dashboard_kpi. Excludes report_sales_test, dashboard_kpi_backup.
Incremental Sync
Watermark-based delta exports.
Basic Incremental Sync
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_ms_02 \
--source_db_auth_id ds_04_pg \
--source_db_name tpch \
--source_schema_name tpch_1_incremental \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--target_storage_id aws_s3_01 \
--incremental_table "tpch_1_incremental.lineitem:l_shipdate:date" \
--incremental_table "tpch_1_incremental.orders:o_orderdate:date" \
--generate_metadata
# First sync exports all data; subsequent syncs export only new/changed data
./LakeXpress sync
Daily Order Updates
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_postgres \
--source_db_auth_id source_postgres \
--source_db_name ecommerce \
--source_schema_name public \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--target_storage_id s3_01 \
--incremental_table "public.orders:created_at:datetime" \
--incremental_table "public.order_items:created_at:datetime" \
--publish_target snowflake_prod \
--n_jobs 4
# Run daily via cron or scheduler
./LakeXpress sync
First run exports all rows. Subsequent runs export only new rows from orders and order_items; all other tables are fully exported each time.
Event Log Ingestion
./LakeXpress config create \
-a credentials.json \
--lxdb_auth_id lxdb_postgres \
--source_db_auth_id source_postgres \
--source_db_name events \
--source_schema_name events \
--fastbcp_dir_path ./FastBCP_linux-x64/latest/ \
--target_storage_id aws_s3_01 \
--incremental_table "events.pageviews:event_time:timestamp" \
--incremental_table "events.clicks:event_time:timestamp" \
--incremental_table "events.conversions:event_time:timestamp" \
--incremental_safety_lag 600 \
--sub_path production/events \
--n_jobs 8 \
--fastbcp_p 4
# Run every 10 minutes
./LakeXpress sync
Debug Mode
./LakeXpress sync --log_lev DEBUG
Resume Failed Exports
When a sync starts, note the run ID printed in the output:
2025-10-31 10:15:23 | INFO | Starting sync run: 20251031-2f73b4d0-8647-11ef-8089-c403a82a4577
Resume with:
./LakeXpress sync --run_id 20251031-2f73b4d0-8647-11ef-8089-c403a82a4577 --resume
This skips completed tables and retries failed ones.
Query Failed Tables
SELECT
source_schema,
source_table,
status,
error_message,
started_at,
finished_at
FROM jobs
WHERE run_id = '20251031-2f73b4d0-8647-11ef-8089-c403a82a4577'
AND status = 'failed'
ORDER BY source_schema, source_table;