X Xerobit

Parse CSV in JavaScript — PapaParse, csv-parse, and Manual Parsing

Parse CSV files in JavaScript with PapaParse (browser), csv-parse (Node.js), or the native streaming API. Covers headers, quoted fields, custom delimiters, and async streaming...

Mian Ali Khalid · 5 min read
Use the tool
CSV to JSON Converter
Convert CSV files to JSON with proper quoting and escaping.
Open CSV to JSON Converter →

Parsing CSV in JavaScript requires handling quoted fields, escaped delimiters, and multiline values. Use PapaParse in the browser and csv-parse in Node.js — both handle these edge cases correctly.

Convert CSV to JSON instantly with the CSV to JSON Converter.

PapaParse (browser and Node.js)

npm install papaparse
// Browser: parse a File input
import Papa from 'papaparse';

// Parse the file as soon as the user picks one.
const fileInput = document.getElementById('csv-file');

fileInput.addEventListener('change', (event) => {
  const [file] = event.target.files;

  Papa.parse(file, {
    header: true,          // Use first row as column names
    dynamicTyping: true,   // Auto-convert numbers and booleans
    skipEmptyLines: true,
    complete: (results) => {
      console.log(results.data);   // Array of objects
      console.log(results.errors); // Any parse errors
      console.log(results.meta);   // Delimiter, line break, fields
    },
    error: (error) => console.error(error),
  });
});
// Parse a CSV string:
const csv = `name,age,active
Alice,30,true
Bob,25,false`;

const result = Papa.parse(csv, {
  header: true,
  dynamicTyping: true,
  skipEmptyLines: true,
});
const { data } = result;
// [{ name: 'Alice', age: 30, active: true }, { name: 'Bob', age: 25, active: false }]

csv-parse (Node.js — production grade)

npm install csv-parse
import { parse } from 'csv-parse/sync'; // npm install csv-parse

// Synchronous (small files):
import { readFileSync } from 'fs';

const content = readFileSync('data.csv', 'utf8');
const records = parse(content, {
  columns: true,       // Use first row as keys
  skip_empty_lines: true,
  cast: true,          // Auto-cast types
  trim: true,          // Trim whitespace
});
// Array of objects

// Async streaming (large files):
import { createReadStream } from 'fs';
import { parse } from 'csv-parse';
import { pipeline } from 'stream/promises';
import { Transform } from 'stream';

/**
 * Stream-parse a large CSV file without loading it all into memory at once.
 * @param {string} filePath - Path to the CSV file on disk.
 * @returns {Promise<object[]>} Parsed rows as objects keyed by header name.
 */
async function processLargeCSV(filePath) {
  const records = [];

  // Sink stage: collect each parsed record as it flows through the pipeline.
  const collector = new Transform({
    objectMode: true,
    transform(record, _encoding, done) {
      records.push(record);
      done();
    },
  });

  // pipeline() wires the stages together and rejects on any stream error.
  await pipeline(
    createReadStream(filePath),
    parse({ columns: true, skip_empty_lines: true }),
    collector
  );

  return records;
}

Handling edge cases

// PapaParse handles all of these correctly:

// Quoted fields with embedded commas:
// "Smith, John",30,NYC → { name: 'Smith, John', age: 30, city: 'NYC' }

// Quoted fields with embedded newlines (multiline values):
// "Line 1\nLine 2",value → { notes: 'Line 1\nLine 2', value: 'value' }

// Escaped quotes (RFC 4180: a literal quote is written as two quotes):
// "He said ""hello""" → 'He said "hello"'

// Custom delimiter (TSV — tab-separated values):
Papa.parse(tsv, { delimiter: '\t', header: true });

// Custom line ending (Windows-style CRLF):
Papa.parse(csv, { newline: '\r\n' });

// Different encoding:
// PapaParse accepts ArrayBuffer for encoding conversion

Manual CSV parser (understanding the algorithm)

/**
 * Parse CSV text into an array of rows (each row an array of string fields).
 * Handles quoted fields, escaped quotes (""), embedded delimiters/newlines
 * inside quotes, and both LF and CRLF line endings. Blank lines are skipped.
 *
 * Fix over the naive version: a row consisting solely of a quoted empty
 * field ("") was previously dropped, because the "row has content" check
 * only looked at `field !== ''`. We now track whether the current field
 * was quoted so an explicit empty field still produces a row.
 *
 * @param {string} text - Raw CSV content.
 * @param {string} [delimiter=','] - Field separator (e.g. '\t' for TSV).
 * @returns {string[][]} Rows of string fields (no type coercion).
 */
function parseCSV(text, delimiter = ',') {
  const rows = [];
  let row = [];
  let field = '';
  let inQuotes = false;
  let sawQuotes = false; // current field used quotes — "" is a real (empty) field
  let i = 0;

  while (i < text.length) {
    const char = text[i];
    const next = text[i + 1];

    if (char === '"') {
      if (inQuotes && next === '"') {
        field += '"';  // Escaped quote
        i += 2;
        continue;
      }
      inQuotes = !inQuotes;
      sawQuotes = true;
    } else if (char === delimiter && !inQuotes) {
      row.push(field);
      field = '';
      sawQuotes = false;
    } else if ((char === '\r' || char === '\n') && !inQuotes) {
      if (char === '\r' && next === '\n') i++;  // CRLF counts as one break
      // Emit the row unless the line was completely empty (blank line).
      if (field !== '' || sawQuotes || row.length > 0) {
        row.push(field);
        rows.push(row);
        row = [];
        field = '';
        sawQuotes = false;
      }
    } else {
      field += char;
    }
    i++;
  }

  // Flush the final row when the input lacks a trailing newline.
  if (field !== '' || sawQuotes || row.length > 0) {
    row.push(field);
    rows.push(row);
  }

  return rows;
}

// With headers:
/**
 * Parse CSV text and map each data row onto the names in the header row.
 * Missing trailing cells default to the empty string.
 * @param {string} text - Raw CSV content with a header row.
 * @returns {object[]} One object per data row, keyed by header names.
 */
function parseCSVWithHeaders(text) {
  const [headerRow, ...dataRows] = parseCSV(text);
  return dataRows.map((cells) =>
    Object.fromEntries(headerRow.map((name, idx) => [name, cells[idx] ?? '']))
  );
}

Convert CSV to JSON (Node.js script)

import { readFileSync, writeFileSync } from 'fs';
import { parse } from 'csv-parse/sync';

/**
 * Read a CSV file, parse it, and write the rows out as pretty-printed JSON.
 * @param {string} csvPath - Path of the input CSV file.
 * @param {string} jsonPath - Path of the JSON file to write.
 * @returns {object[]} The parsed records.
 */
function csvToJson(csvPath, jsonPath) {
  const records = parse(readFileSync(csvPath, 'utf8'), {
    columns: true,
    skip_empty_lines: true,
    cast: true,
    trim: true,
  });

  writeFileSync(jsonPath, JSON.stringify(records, null, 2));
  console.log(`Converted ${records.length} rows to ${jsonPath}`);
  return records;
}

csvToJson('input.csv', 'output.json');

React: CSV file upload and preview

import { useState } from 'react';
import Papa from 'papaparse';

function CSVUploader() {
  const [rows, setRows] = useState([]);
  const [headers, setHeaders] = useState([]);
  const [error, setError] = useState(null);

  function handleFile(e) {
    const file = e.target.files[0];
    if (!file) return;

    Papa.parse(file, {
      header: true,
      skipEmptyLines: true,
      complete: ({ data, errors, meta }) => {
        if (errors.length) {
          setError(errors[0].message);
          return;
        }
        setHeaders(meta.fields ?? []);
        setRows(data);
        setError(null);
      },
    });
  }

  return (
    <div>
      <input type="file" accept=".csv" onChange={handleFile} />
      {error && <p className="error">{error}</p>}
      {rows.length > 0 && (
        <table>
          <thead>
            <tr>{headers.map(h => <th key={h}>{h}</th>)}</tr>
          </thead>
          <tbody>
            {rows.slice(0, 100).map((row, i) => (
              <tr key={i}>
                {headers.map(h => <td key={h}>{String(row[h] ?? '')}</td>)}
              </tr>
            ))}
          </tbody>
        </table>
      )}
    </div>
  );
}

Related posts

Related tool

CSV to JSON Converter

Convert CSV files to JSON with proper quoting and escaping.

Written by Mian Ali Khalid. Part of the Data & Format pillar.