🔄 Powerful SQL to JSON converter with support for large files and multiple output formats. Converts SQL database dumps to structured JSON files.
- 🚀 Large file processing: Stream processing for SQL files up to gigabytes in size
- 📁 Multiple output modes:
  - Separate files: Each table becomes a separate JSON file (default)
  - Combined file: All tables in one JSON file
- 💾 Smart output: Automatically creates a `json-output` directory with a summary file
- ⚡ High performance: Batch processing and memory optimization
- 🛡️ Error resilient: Skip unparsable statements and continue processing
- 📊 Progress tracking: Real-time progress and memory usage
- 🎯 CLI & Library: Can be used as both CLI tool and JavaScript library
```bash
# Run once without installing
npx sql-to-json-converter database.sql

# Or install globally and use the CLI directly
npm install -g sql-to-json-converter
sql-to-json database.sql

# Or install locally to use it as a library
npm install sql-to-json-converter
```
```bash
# Export each table as separate file in json-output/ directory
npx sql-to-json-converter database.sql

# Specify different output directory
npx sql-to-json-converter database.sql --output-dir my-tables

# With additional options
npx sql-to-json-converter database.sql --memory --batch-size 1000

# Export everything to a single JSON file
npx sql-to-json-converter database.sql --combined --output result.json

# Export to stdout
npx sql-to-json-converter database.sql --combined

# Process large files with memory monitoring
npx sql-to-json-converter large-db.sql --memory --limit 100000

# Skip unparsable statements (faster processing)
npx sql-to-json-converter database.sql --skip-unparsable

# Custom batch size for performance tuning
npx sql-to-json-converter database.sql --batch-size 2000
```
```js
const fs = require('fs');
const { convertSQLToJSONFiles, convertSQLToJSON } = require('sql-to-json-converter');

// Read an SQL file and convert it to separate files
const sqlContent = fs.readFileSync('database.sql', 'utf8');
const result = convertSQLToJSONFiles(sqlContent, 'output-folder');
console.log(`Converted ${result.metadata.totalTables} tables`);

// Or convert to a combined JSON object
const combined = convertSQLToJSON(sqlContent);
console.log(combined.tables);
```
```js
const { SQLToJSONConverter } = require('sql-to-json-converter');

const converter = new SQLToJSONConverter({
  batchSize: 1000,
  showMemory: true,
  outputMode: 'separate',
  outputDir: 'my-json-data'
});

// Process a large file with streaming
converter.processLargeSQL('huge-database.sql').then(() => {
  console.log('Conversion completed!');
});
```
```js
// High-level functions
convertSQLToJSON(content, options)         // -> Combined JSON object
convertSQLToJSONFiles(content, outputDir)  // -> Separate files + summary
processLargeSQLFile(inputFile, outputFile) // -> Stream processing

// Advanced usage
createConverter(options)                   // -> SQLToJSONConverter instance
```
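For orientation, here is a minimal sketch wiring these entry points together. It assumes the package is installed locally and uses placeholder file names:

```js
const fs = require('fs');
const {
  convertSQLToJSON,
  convertSQLToJSONFiles,
  processLargeSQLFile,
  createConverter,
} = require('sql-to-json-converter');

// Small dump: build one combined JSON object in memory
const combined = convertSQLToJSON(fs.readFileSync('small.sql', 'utf8'));
console.log(combined.tables);

// Small dump: write one JSON file per table plus _summary.json
convertSQLToJSONFiles(fs.readFileSync('small.sql', 'utf8'), 'json-output');

// Large dump: stream from one file to another (placeholder file names)
processLargeSQLFile('huge-database.sql', 'huge-database.json');

// Advanced: a preconfigured converter instance
const converter = createConverter({ batchSize: 2000, skipUnparsable: true });
converter.processLargeSQL('huge-database.sql').then(() => console.log('Done'));
```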
```sql
CREATE TABLE users (
  id INT PRIMARY KEY AUTO_INCREMENT,
  name VARCHAR(255) NOT NULL,
  email VARCHAR(255) UNIQUE
);

INSERT INTO users VALUES (1, 'John Doe', 'john@example.com');
INSERT INTO users VALUES (2, 'Jane Smith', 'jane@example.com');

CREATE TABLE products (
  id INT PRIMARY KEY,
  name VARCHAR(100),
  price DECIMAL(10,2)
);

INSERT INTO products VALUES (1, 'Laptop', 999.99);
INSERT INTO products VALUES (2, 'Mouse', 25.50);
```
```
json-output/
├── _summary.json    # Overview of all tables
├── users.json       # User table data
└── products.json    # Product table data
```

`users.json`:
```json
{
  "tableName": "users",
  "columns": [
    {"name": "id", "type": "INT PRIMARY KEY AUTO_INCREMENT"},
    {"name": "name", "type": "VARCHAR(255) NOT NULL"},
    {"name": "email", "type": "VARCHAR(255) UNIQUE"}
  ],
  "recordCount": 2,
  "generatedAt": "2024-01-20T10:30:00.000Z",
  "data": [
    {"id": 1, "name": "John Doe", "email": "john@example.com"},
    {"id": 2, "name": "Jane Smith", "email": "jane@example.com"}
  ]
}
```
`_summary.json`:

```json
{
  "generatedAt": "2024-01-20T10:30:00.000Z",
  "totalTables": 2,
  "totalRecords": 4,
  "tables": [
    {"name": "users", "recordCount": 2, "fileName": "users.json"},
    {"name": "products", "recordCount": 2, "fileName": "products.json"}
  ]
}
```
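Reading the generated output back is plain Node.js. A small sketch, assuming the default `json-output` directory and relying only on the fields shown above:

```js
const fs = require('fs');
const path = require('path');

const outDir = 'json-output';
const summary = JSON.parse(fs.readFileSync(path.join(outDir, '_summary.json'), 'utf8'));

for (const entry of summary.tables) {
  // Each per-table file carries tableName, recordCount, and the data rows
  const table = JSON.parse(fs.readFileSync(path.join(outDir, entry.fileName), 'utf8'));
  console.log(`${table.tableName}: ${table.recordCount} record(s)`);
  console.log(table.data[0]); // first row, e.g. { id: 1, name: 'John Doe', ... }
}
```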
| Option | Description | Default |
|---|---|---|
| `--help`, `-h` | Show help | |
| `--version`, `-v` | Show version | |
| `--separate` | Export separate files (default) | ✅ |
| `--combined` | Export combined file | |
| `--output [file]` | Output file for combined mode | |
| `--output-dir [dir]` | Output directory for separate mode | `json-output` |
| `--memory`, `-m` | Show memory usage | |
| `--batch-size [num]` | Batch size for processing | 500 |
| `--limit [num]` | Limit number of statements | |
| `--skip-unparsable` | Skip unparsable statements | |
- < 10MB: In-memory processing
- > 10MB: Automatic stream processing
- > 100MB: Recommended to use the `--memory` flag
- > 1GB: Recommended to increase `--batch-size` to 2000+
```bash
# For very large files (> 1GB)
npx sql-to-json-converter huge-db.sql \
  --memory \
  --batch-size 5000 \
  --skip-unparsable \
  --output-dir large-output
```
| Statement | Support | Description |
|---|---|---|
| CREATE TABLE | ✅ Full | Table structure, columns, constraints |
| INSERT INTO | ✅ Full | Single and multiple value sets |
| VALUES | ✅ Full | Quoted strings, numbers, NULL |
| DROP TABLE | ✅ Skip | Ignored during processing |
| Comments | ✅ Full | `--` line comments |
| Transactions | ✅ Basic | START TRANSACTION, COMMIT |
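As a quick illustration, a dump that mixes these statement types can be passed straight to `convertSQLToJSON`. The embedded SQL below is illustrative; the `DROP TABLE` and transaction statements should simply be skipped or ignored as noted in the table:

```js
const { convertSQLToJSON } = require('sql-to-json-converter');

const sql = `
-- seed data for the tags table
START TRANSACTION;
DROP TABLE tags;
CREATE TABLE tags (id INT PRIMARY KEY, label VARCHAR(50));
INSERT INTO tags VALUES (1, 'alpha'), (2, 'beta');
COMMIT;
`;

console.log(convertSQLToJSON(sql).tables);
```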
```bash
git clone <repo-url>
cd sql-to-json-converter
npm install
```
```bash
# Create test SQL file
echo "CREATE TABLE test (id INT); INSERT INTO test VALUES (1);" > test.sql

# Test CLI
npm start test.sql

# Test library
node -e "
const {convertSQLToJSONFiles} = require('./index');
const fs = require('fs');
const sql = fs.readFileSync('test.sql', 'utf8');
console.log(convertSQLToJSONFiles(sql));
"
```
```bash
# Update version
npm version patch|minor|major

# Publish to npm
npm publish
```
```js
const options = {
  batchSize: 1000,        // Processing batch size
  showMemory: true,       // Show memory usage
  limit: 50000,           // Max statements to process
  skipUnparsable: true,   // Skip invalid statements
  outputMode: 'separate', // 'separate' or 'combined'
  outputDir: 'my-output'  // Output directory name
};
```
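The same options object can then be handed to the library entry points. A brief sketch, continuing from the `options` object above and using an illustrative file path:

```js
const fs = require('fs');
const { createConverter, convertSQLToJSON } = require('sql-to-json-converter');

// Instance-based usage with the options defined above
const converter = createConverter(options);

// One-shot conversion with the same options
const combined = convertSQLToJSON(fs.readFileSync('database.sql', 'utf8'), options);
```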
1. Memory errors with large files

```bash
# Reduce the batch size and enable memory monitoring
npx sql-to-json-converter large-file.sql --batch-size 200 --memory
```

2. Unparsable statements

```bash
# Skip invalid statements
npx sql-to-json-converter problematic.sql --skip-unparsable
```

3. Slow processing of very large files

```bash
# Increase the batch size and skip unparsable statements
npx sql-to-json-converter huge.sql --batch-size 2000 --skip-unparsable
```
MIT License
- Fork the repository
- Create a feature branch (`git checkout -b feature/amazing-feature`)
- Commit your changes (`git commit -m 'Add amazing feature'`)
- Push to the branch (`git push origin feature/amazing-feature`)
- Open a Pull Request
- 🐛 Bug reports: GitHub Issues
- 📧 Email: thangdevalone@gmail.com