How to deal with a 50 GB CSV file in R?

Question

The code below tries to import the file in chunks with read.csv.ffdf() from the ff package:

library(ff)

# Import the CSV into an ffdf object, which keeps the data on disk rather
# than in RAM. A small first chunk is read to determine column types,
# then the remaining rows are appended in larger chunks.
all <- read.csv.ffdf(
  file = "<path of large file>",
  sep = ",",
  header = TRUE,
  VERBOSE = TRUE,
  first.rows = 10000,
  next.rows = 50000
)
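Once the import finishes, the ffdf can be processed block by block so that only one chunk is ever held in memory. A minimal sketch, assuming the ffdf above is named all and has a numeric column named value (a hypothetical column name):

library(ff)

# chunk() splits the row range of the ffdf into memory-sized pieces;
# each piece is materialised as an ordinary data.frame, used, and discarded.
total <- 0
for (idx in chunk(all)) {
  block <- all[idx, ]                                # one chunk in RAM
  total <- total + sum(block$value, na.rm = TRUE)    # 'value' is hypothetical
}
total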

Answer

Use read.csv.sql() from the sqldf package. It imports the file into a temporary on-disk SQLite database, runs the SQL query there, and returns only the matching rows to R, so the whole 50 GB never has to fit in memory. Using iris.csv as a small stand-in:

library(sqldf)

# Only the rows that satisfy the WHERE clause are loaded into R.
iris2 <- read.csv.sql("iris.csv",
    sql = "select * from file where Species = 'setosa' ")
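Applied to the question's file, the same call looks like the sketch below; the column name and value in the WHERE clause are placeholders for whatever the real data contain:

library(sqldf)

# The file is staged in a temporary SQLite database (dbname = tempfile()
# is the default), filtered there, and only the result comes back to R.
subset_df <- read.csv.sql(
  "<path of large file>",
  sql = "select * from file where some_column = 'some_value'",  # hypothetical filter
  header = TRUE,
  sep = ","
)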