// Parse an Excel-style CSV whose first record is the header row.
// withHeader() (no args) is required for lookup by column name; without it
// record.get("Last Name") throws IllegalArgumentException because no header
// mapping exists.
Reader in = new FileReader("path/to/file.csv");
Iterable<CSVRecord> records = CSVFormat.EXCEL.withHeader().parse(in);
for (CSVRecord record : records) {
    String lastName = record.get("Last Name");
    String firstName = record.get("First Name");
}

        

// Parse an RFC 4180 CSV and read each record's values positionally,
// by zero-based column index (no header mapping needed).
Reader reader = new FileReader("path/to/file.csv");
Iterable<CSVRecord> rows = CSVFormat.RFC4180.parse(reader);
for (CSVRecord row : rows) {
    String first = row.get(0);
    String second = row.get(1);
}

        

// Declare the expected CSV headers as an enum; Commons CSV uses the enum
// constant names ("ID", "CustomerNo", "Name") as the header names.
// NOTE(review): the constants intentionally do not follow UPPER_SNAKE_CASE —
// they must match the CSV header text exactly, so renaming them would change
// which columns are resolved.
public enum Headers {
    ID, CustomerNo, Name
}
// Parse with the enum-defined header mapping and look values up type-safely
// by enum constant instead of by raw string.
Reader in = new FileReader("path/to/file.csv");
Iterable<CSVRecord> records = CSVFormat.RFC4180.withHeader(Headers.class).parse(in);
for (CSVRecord record : records) {
    String id = record.get(Headers.ID);
    String customerNo = record.get(Headers.CustomerNo);
    String name = record.get(Headers.Name);
}

        

// Treat the file's first record as the header row, then look up each
// value in the remaining records by its header name.
Reader reader = new FileReader("path/to/file.csv");
Iterable<CSVRecord> rows = CSVFormat.RFC4180.withFirstRecordAsHeader().parse(reader);
for (CSVRecord row : rows) {
    String id = row.get("ID");
    String customerNo = row.get("CustomerNo");
    String name = row.get("Name");
}

        

/**
 * Verifies that book.csv parses into the expected author/title pairs.
 *
 * <p>Fix: the original chained {@code withFirstRecordAsHeader()} after
 * {@code withHeader(HEADERS)}; {@code withFirstRecordAsHeader()} resets the
 * header mapping, silently discarding {@code HEADERS}. The actual intent —
 * use the explicit names and skip the file's header record — is
 * {@code withHeader(HEADERS).withSkipHeaderRecord()}. The Reader is now also
 * closed via try-with-resources (it was leaked).
 *
 * @throws IOException if book.csv cannot be opened or read
 */
public void givenCSVFile_whenRead_thenContentsAsExpected() throws IOException {
    try (Reader in = new FileReader("book.csv")) {
        Iterable<CSVRecord> records = CSVFormat.DEFAULT
          .withHeader(HEADERS)
          .withSkipHeaderRecord()
          .parse(in);
        for (CSVRecord record : records) {
            String author = record.get("author");
            String title = record.get("title");
            // AUTHOR_BOOK_MAP presumably maps author -> expected title — confirm.
            assertEquals(AUTHOR_BOOK_MAP.get(author), title);
        }
    }
}

        

/**
 * Parses a single CSV line into a {@code BeamRecord} matching the given row type.
 *
 * <p>Fixes: the {@code CSVParser} (a {@code Closeable}) was never closed, and an
 * empty/blank line made {@code getRecords().get(0)} throw
 * {@code IndexOutOfBoundsException} instead of the {@code IllegalArgumentException}
 * callers already expect for malformed input.
 *
 * @param csvFormat format used to parse {@code line}
 * @param line one line of CSV text
 * @param beamRecordSqlType expected schema (field count and per-index types)
 * @return a BeamRecord whose fields are cast per the schema
 * @throws IllegalArgumentException if the line is empty, has the wrong field
 *     count, or parsing fails
 */
public static BeamRecord csvLine2BeamRecord(
    CSVFormat csvFormat,
    String line,
    BeamRecordSqlType beamRecordSqlType) {
  List<Object> fieldsValue = new ArrayList<>(beamRecordSqlType.getFieldCount());
  try (StringReader reader = new StringReader(line);
      CSVParser parser = csvFormat.parse(reader)) {
    List<CSVRecord> records = parser.getRecords();
    if (records.isEmpty()) {
      throw new IllegalArgumentException(String.format(
          "Expect %d fields, but the line is empty", beamRecordSqlType.getFieldCount()));
    }
    CSVRecord rawRecord = records.get(0);

    if (rawRecord.size() != beamRecordSqlType.getFieldCount()) {
      throw new IllegalArgumentException(String.format(
          "Expect %d fields, but actually %d",
          beamRecordSqlType.getFieldCount(), rawRecord.size()
      ));
    }
    for (int idx = 0; idx < beamRecordSqlType.getFieldCount(); idx++) {
      String raw = rawRecord.get(idx);
      // autoCastField converts the raw string to the schema's field type.
      fieldsValue.add(autoCastField(beamRecordSqlType.getFieldTypeByIndex(idx), raw));
    }
  } catch (IOException e) {
    throw new IllegalArgumentException("decodeRecord failed!", e);
  }
  return new BeamRecord(beamRecordSqlType, fieldsValue);
}

        

/**
 * Decodes CSV data from {@code in} into time-stamped samples, pushing each
 * sample to {@code mapToResult}.
 *
 * <p>The first record is used as the header row. Column 0 of every data
 * record is parsed as the sample timestamp; each name in {@code headers} is
 * then looked up in the record and parsed as a number.
 *
 * <p>Fix: the redundant {@code withSkipHeaderRecord()} was removed —
 * {@code withFirstRecordAsHeader()} already implies skipping the header record.
 *
 * @param in CSV byte stream, decoded with this adapter's {@code encoding}
 * @param headers names of the data columns to extract from each record
 * @param mapToResult invoked once per decoded sample
 * @throws IOException if the stream cannot be read
 * @throws DecodingDataFromAdapterException presumably thrown by the injected
 *     dateParser/numberParser on malformed values — confirm with their impls
 */
public void decode(InputStream in, List<String> headers, Consumer<DataSample<T>> mapToResult) throws IOException, DecodingDataFromAdapterException {
    try (Profiler ignored = Profiler.start("Building time series from csv data", logger::trace)) {
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, encoding))) {
            CSVFormat csvFormat = CSVFormat.DEFAULT
                    .withAllowMissingColumnNames(false)
                    .withFirstRecordAsHeader()
                    .withDelimiter(delimiter);
            Iterable<CSVRecord> records = csvFormat.parse(reader);

            for (CSVRecord csvRecord : records) {
                // Column 0 is always the timestamp column.
                ZonedDateTime timeStamp = dateParser.apply(csvRecord.get(0));
                DataSample<T> tRecord = new DataSample<>(timeStamp);
                for (String h : headers) {
                    tRecord.getCells().put(h, numberParser.apply(csvRecord.get(h)));
                }
                mapToResult.accept(tRecord);
            }
        }
    }
}

        

/**
 * Reads only the first CSV record from {@code in} and extracts the column
 * headers from it.
 *
 * @param in CSV byte stream, decoded with this adapter's {@code encoding}
 * @return the parsed column header names
 * @throws IOException if the stream cannot be read
 */
public List<String> getDataColumnHeaders(InputStream in) throws IOException, DecodingDataFromAdapterException {
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, encoding))) {
        CSVFormat format = CSVFormat.DEFAULT
                .withDelimiter(delimiter)
                .withAllowMissingColumnNames(false);
        Iterable<CSVRecord> parsed = format.parse(reader);
        CSVRecord headerRecord = parsed.iterator().next();
        return this.parseColumnHeaders(headerRecord);
    }
}

        

/**
 * Hadoop mapper: parses the CSV payload of each input value and emits
 * (field, one) for every CSV field, incrementing the INPUT_WORDS counter.
 *
 * <p>Fixes: the ByteArrayInputStream round-trip (whose {@code read()} return
 * value was ignored) is replaced by decoding the valid region of the backing
 * array directly — {@code BytesWritable.getBytes()} may be longer than
 * {@code getLength()}, so the length bound matters. The {@code CSVParser}
 * (a {@code Closeable}) is now closed via try-with-resources.
 *
 * @throws IOException if writing to the context fails
 * @throws InterruptedException if the framework interrupts the write
 */
public void map(Text key, BytesWritable value, Context context) throws IOException, InterruptedException {
    // NOTE(review): uses the platform default charset, as the original did;
    // consider an explicit charset (e.g. UTF-8) — confirm input encoding.
    String csv = new String(value.getBytes(), 0, value.getLength());
    try (CSVParser parser = CSVParser.parse(csv, CSVFormat.DEFAULT)) {
        for (CSVRecord csvRecord : parser) {
            for (String field : csvRecord) {
                word.set(field);
                context.write(word, one);
                Counter counter = context.getCounter(CountersEnum.class.getName(), CountersEnum.INPUT_WORDS.toString());
                counter.increment(1);
            }
        }
    } catch (Exception e) {
        // Best-effort: a malformed CSV split is skipped rather than failing
        // the job, preserving the original's behavior.
        // NOTE(review): prefer logging or a "bad records" counter over
        // silently dropping the record.
    }
}

        

/**
 * Loads all data records from the semicolon-delimited CSV resource at
 * {@code path} (resolved relative to the test class's classpath).
 *
 * <p>The first record is treated as the header and is not included in the
 * result. Fixes: the Reader was never closed (now try-with-resources) and
 * the pre-diamond {@code new ArrayList<String[]>()} was modernized.
 *
 * @param path classpath-relative resource path of the CSV file
 * @return one String[] of field values per data record; possibly partial or
 *     empty if reading fails (original best-effort behavior preserved)
 */
private static List<String[]> getRecords(String path) {
    List<String[]> records = new ArrayList<>();
    try (Reader reader = new FileReader(
            BigFileAnalyzerPerformanceTest.class.getResource(path).getPath())) {
        CSVFormat csvFormat = CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader();
        for (CSVRecord csvRecord : csvFormat.parse(reader)) {
            String[] values = new String[csvRecord.size()];
            for (int i = 0; i < csvRecord.size(); i++) {
                values[i] = csvRecord.get(i);
            }
            records.add(values);
        }
    } catch (IOException e) {
        // NOTE(review): prefer a logger over printStackTrace(); kept to
        // preserve the original best-effort contract (return what was read).
        e.printStackTrace();
    }
    return records;
}
main