{
  "$schema": "https://json-schema.org/draft/2020-12/schema",
  "features": {
    "core": [
      {
        "id": "parallel-transfer",
        "name": "Parallel Data Transfer",
        "category": "Performance",
        "description": "Automatically split large tables into multiple parallel streams for maximum throughput during database-to-database transfers",
        "benefits": [
          "Linear scaling with CPU cores",
          "Reduced transfer time by 80-95%",
          "Automatic workload distribution",
          "No manual partitioning required"
        ],
        "technicalDetails": {
          "parameters": ["--parallel", "--split-by"],
          "autoDetection": "Automatically detects numeric/date columns for splitting",
          "threadManagement": "Dynamic thread pool management",
          "loadBalancing": "Intelligent chunk size calculation"
        },
        "icon": "⚡"
      },
      {
        "id": "single-executable",
        "name": "Single Executable Deployment",
        "category": "Deployment",
        "description": "Complete solution in one executable file with all database drivers embedded",
        "benefits": [
          "Zero installation time",
          "No dependency conflicts",
          "Consistent behavior across environments",
          "Simplified version management"
        ],
        "technicalDetails": {
          "size": "~50MB",
          "embeddedDrivers": ["PostgreSQL", "MySQL", "Oracle", "SQL Server", "ODBC"],
          "platforms": ["Windows x64", "Linux x64"],
          "requirements": "None - fully self-contained"
        },
        "icon": "📦"
      },
      {
        "id": "streaming-architecture",
        "name": "Streaming Architecture",
        "category": "Performance",
        "description": "Process data row-by-row without loading entire datasets into memory during transfers or imports",
        "benefits": [
          "Constant memory usage regardless of data volume",
          "Transfer terabytes on standard hardware",
          "Reduced database memory pressure",
          "Immediate data availability in target"
        ],
        "technicalDetails": {
          "bufferSize": "Configurable (default 64KB)",
          "memoryUsage": "Typically < 500MB",
          "dataFlow": "Source → Memory Buffer → Target Database",
          "backpressure": "Automatic flow control"
        },
        "icon": "🌊"
      },
      {
        "id": "cross-database-transfer",
        "name": "Cross-Database Platform Transfer",
        "category": "Integration",
        "description": "Transfer data between any combination of supported databases with automatic schema mapping",
        "benefits": [
          "Seamless cross-platform migration",
          "Automatic data type conversion",
          "Schema mapping and validation",
          "No intermediate files or staging"
        ],
        "technicalDetails": {
          "supportedDatabases": ["PostgreSQL", "MySQL", "MariaDB", "Oracle", "SQL Server", "ClickHouse", "SAP HANA", "Teradata", "Netezza", "DuckDB"],
          "schemaMapping": "Automatic data type conversion",
          "features": ["NULL handling", "Unicode support", "Large object support", "Identity column handling"]
        },
        "icon": "🔄"
      },
      {
        "id": "file-import",
        "name": "File Import to Database",
        "category": "Import",
        "description": "Import CSV, Parquet, JSON, XLSX, and BSON files directly into databases with automatic schema detection",
        "benefits": [
          "No ETL pipeline needed",
          "Automatic schema detection",
          "Parallel file processing",
          "Support for compressed files"
        ],
        "technicalDetails": {
          "formats": ["CSV", "Parquet", "JSON", "XLSX", "BSON"],
          "encoding": "UTF-8, UTF-16, custom",
          "compression": ["None", "gzip", "zip", "Snappy", "Brotli", "LZ4"],
          "features": ["Schema inference", "Data type detection", "Parallel loading", "Error handling"]
        },
        "icon": "📥"
      },
      {
        "id": "enterprise-logging",
        "name": "Enterprise Logging",
        "category": "Monitoring",
        "description": "Comprehensive logging with multiple output destinations and formats",
        "benefits": [
          "Full audit trail for compliance",
          "Easy troubleshooting",
          "Integration with monitoring tools",
          "Performance metrics tracking"
        ],
        "technicalDetails": {
          "destinations": ["Console", "File", "Windows Event Log", "Syslog"],
          "formats": ["Text", "JSON", "Structured"],
          "levels": ["Debug", "Info", "Warning", "Error"],
          "metrics": ["Rows/sec", "Bytes/sec", "Progress %", "Estimated completion"]
        },
        "icon": "📊"
      },
      {
        "id": "security",
        "name": "Enterprise Security",
        "category": "Security",
        "description": "Comprehensive security features for enterprise environments",
        "benefits": [
          "Secure credential handling",
          "SQL injection prevention",
          "Compliance-ready logging",
          "Encrypted connections"
        ],
        "technicalDetails": {
          "features": ["Password obfuscation", "SSL/TLS support", "Credential encryption", "Digitally signed executable"],
          "compliance": ["GDPR compatible", "SOC 2 ready", "HIPAA compliant"],
          "authentication": "Support for Windows Auth, Kerberos, OAuth"
        },
        "icon": "🔒"
      },
      {
        "id": "error-handling",
        "name": "Robust Error Handling",
        "category": "Reliability",
        "description": "Advanced error handling with automatic retry and detailed error reporting",
        "benefits": [
          "Automatic retry on transient errors",
          "Detailed error messages",
          "Transaction support",
          "Checkpoint and resume capability"
        ],
        "technicalDetails": {
          "retry": "Configurable retry policy",
          "logging": "Detailed error stack traces",
          "recovery": "Checkpoint files for resume",
          "validation": "Data integrity checks"
        },
        "icon": "🛡️"
      },
      {
        "id": "schema-mapping",
        "name": "Automatic Schema Mapping",
        "category": "Migration",
        "description": "Intelligent schema mapping between different database platforms",
        "benefits": [
          "No manual schema conversion",
          "Data type compatibility checks",
          "Constraint handling",
          "Index recreation support"
        ],
        "technicalDetails": {
          "mappings": "Cross-database data type mappings",
          "validation": "Schema compatibility validation",
          "options": "Customizable mapping rules",
          "constraints": "Primary keys, foreign keys, indexes"
        },
        "icon": "🗺️"
      }
    ],
    "supportedFormats": [
      {
        "format": "CSV",
        "direction": "Import to Database",
        "description": "Import CSV files with various delimiters and encodings",
        "features": ["Custom delimiters", "Header detection", "Quote handling", "UTF-8/UTF-16 support"],
        "compression": ["None", "gzip", "zip"],
        "useCases": ["Legacy data import", "Excel imports", "Log file imports", "Flat file migration"]
      },
      {
        "format": "Parquet",
        "direction": "Import to Database",
        "description": "Import Parquet files from data lakes into databases",
        "features": ["Schema preservation", "Nested data support", "Partition awareness", "Column selection"],
        "compression": ["Snappy", "gzip", "Brotli", "LZ4"],
        "useCases": ["Data lake ingestion", "Analytics imports", "Historical data loading", "Hadoop migration"]
      },
      {
        "format": "JSON",
        "direction": "Import to Database",
        "description": "Import JSON files and JSON Lines format",
        "features": ["Nested object flattening", "Array handling", "Schema detection", "JSONL support"],
        "compression": ["None", "gzip"],
        "useCases": ["API data import", "NoSQL migration", "Log aggregation", "Document import"]
      },
      {
        "format": "Excel (XLSX)",
        "direction": "Import to Database",
        "description": "Import Excel spreadsheets directly",
        "features": ["Multi-sheet support", "Header detection", "Data type inference", "Formula evaluation"],
        "compression": ["Native XLSX"],
        "useCases": ["Business data import", "Manual data loads", "Reporting imports", "Spreadsheet migration"]
      },
      {
        "format": "BSON",
        "direction": "Import to Database",
        "description": "Import MongoDB BSON files",
        "features": ["Binary data support", "ObjectId handling", "Date conversion", "Embedded documents"],
        "compression": ["None", "gzip"],
        "useCases": ["MongoDB migration", "NoSQL to SQL transfers", "Document database import"]
      }
    ],
    "databaseIntegration": {
      "sources": [
        {
          "database": "PostgreSQL",
          "versions": ["9.6+", "10+", "11+", "12+", "13+", "14+", "15+", "16+"],
          "features": ["Native driver", "Streaming", "Array types", "JSON columns", "COPY protocol"],
          "authentication": ["Password", "MD5", "SCRAM-SHA-256", "Certificate"]
        },
        {
          "database": "MySQL / MariaDB",
          "versions": ["5.7+", "8.0+", "MariaDB 10.x"],
          "features": ["Native driver", "Streaming", "Binary protocol", "UTF-8 support"],
          "authentication": ["Password", "Native auth", "SHA-256", "Caching SHA-2"]
        },
        {
          "database": "Oracle",
          "versions": ["11g", "12c", "18c", "19c", "21c", "23c"],
          "features": ["OCI driver", "Streaming", "LOB support", "RAC support", "Partitioning"],
          "authentication": ["Password", "OS auth", "Kerberos", "Wallet"]
        },
        {
          "database": "SQL Server",
          "versions": ["2012+", "2014+", "2016+", "2017+", "2019+", "2022+"],
          "features": ["Native driver", "Streaming", "Azure SQL", "Windows Auth", "Always Encrypted"],
          "authentication": ["SQL auth", "Windows auth", "Azure AD", "Managed identity"]
        },
        {
          "database": "ClickHouse",
          "versions": ["20.x+", "21.x+", "22.x+", "23.x+", "24.x+"],
          "features": ["Native protocol", "Streaming", "Compression", "Partitioning", "MergeTree"],
          "authentication": ["User/Password", "Custom headers"]
        },
        {
          "database": "SAP HANA",
          "versions": ["2.0 SPS 03+"],
          "features": ["ODBC driver", "Column store", "In-memory tables", "Partitioning"],
          "authentication": ["User/Password", "SAML", "Kerberos"]
        },
        {
          "database": "Teradata",
          "versions": ["15.x", "16.x", "17.x"],
          "features": ["ODBC driver", "FastLoad protocol", "Parallel loading", "TPT support"],
          "authentication": ["User/Password", "LDAP"]
        },
        {
          "database": "Netezza",
          "versions": ["7.x"],
          "features": ["ODBC driver", "External tables", "Zone maps", "Distribution keys"],
          "authentication": ["User/Password"]
        },
        {
          "database": "DuckDB",
          "versions": ["0.8+", "0.9+", "0.10+"],
          "features": ["Native driver", "In-memory", "Parquet support", "Arrow integration"],
          "authentication": ["File-based"]
        }
      ],
      "targets": [
        {
          "database": "PostgreSQL",
          "features": ["COPY protocol", "Bulk insert", "Transaction control", "Constraint handling", "Upsert support"]
        },
        {
          "database": "MySQL / MariaDB",
          "features": ["LOAD DATA LOCAL INFILE", "Bulk insert", "Transaction control", "Auto-increment handling"]
        },
        {
          "database": "SQL Server",
          "features": ["Bulk insert", "Table-valued parameters", "Transaction control", "Identity insert", "Merge support"]
        },
        {
          "database": "Oracle",
          "features": ["Direct path load", "Bulk insert", "Sequence handling", "LOB handling", "Parallel DML"]
        },
        {
          "database": "ClickHouse",
          "features": ["Bulk insert", "Partition optimization", "Compression", "MergeTree optimization", "Async insert"]
        }
      ]
    }
  },
  "metadata": {
    "lastUpdated": "2026-02-20",
    "dataFormat": "JSON",
    "purpose": "Feature catalog for AI agents",
    "audience": ["AI agents", "LLMs", "chatbots"],
    "schemaVersion": "1.0"
  }
}
