/**
 * Sends a batch of jobs to Solr through the async client and blocks until
 * every individual add operation has completed.
 *
 * @param jobs the jobs to index; an empty list completes immediately
 */
def addJobs(jobs: List[Job]): Unit = {
  // Map each job to its add-Future directly. The original indexed loop
  // (`jobs(i)` for i in 0..size-1) was O(n^2) on a List and needed a
  // mutable ListBuffer; a simple map is O(n) and immutable.
  val futures = jobs.map(solrAsyncClient.add)
  // NOTE(review): Duration.Inf can block forever on a stuck request —
  // consider a finite timeout. Blocking itself is kept because callers
  // rely on this method's synchronous contract.
  Await.ready(Future.sequence(futures), Duration.Inf)
}
The test class is as follows; it helps, but not much.
// Benchmark: index `num` jobs through SolrClientDAO in batches of `batch`,
// then report throughput in jobs/second.
val num = 10000
val batch = 350
val jobs = ListBuffer[Job]()
val start = System.currentTimeMillis()
for (i <- 1 to num) {
  jobs += Job("id" + i, "title" + i, "desc" + i, "industry" + i)
  if (i % batch == 0) {
    SolrClientDAO.addJobs(jobs.toList)
    jobs.clear()
  }
}
// BUG FIX: flush the final partial batch — with num = 10000 and batch = 350,
// the original loop silently dropped the last 10000 % 350 = 200 jobs.
if (jobs.nonEmpty) {
  SolrClientDAO.addJobs(jobs.toList)
  jobs.clear()
}
val end = System.currentTimeMillis()
val elapsedMs = end - start
println("total time for " + num + " is " + elapsedMs)
// BUG FIX: guard against division by zero when the run finishes in
// under one second (elapsedMs / 1000 == 0 in the original).
val elapsedSec = math.max(1L, elapsedMs / 1000)
println("it is " + num / elapsedSec + " jobs/second")
2. SolrJ with Latest Version
package com.sillycat.jobsconsumer.persistence
// Throughput benchmark for SolrJDAO: indexes `num` jobs in batches of
// `batch`, commits, and prints jobs/second.
describe("SolrDAO") {
  describe("#add and query") {
    it("Add one single job to Solr") {
      // Removed the unused `expect` local from the original test.
      val num = 1000000
      val batch = 300
      val jobs = ListBuffer[Job]()
      val start = System.currentTimeMillis()
      for (i <- 1 to num) {
        jobs += Job("id" + i, "title" + i, "desc" + i, "industry" + i)
        if (i % batch == 0) {
          SolrJDAO.addJobs(jobs.toList)
          jobs.clear()
        }
      }
      // BUG FIX: flush the final partial batch — 1000000 % 300 = 100 jobs
      // were silently dropped by the original loop.
      if (jobs.nonEmpty) {
        SolrJDAO.addJobs(jobs.toList)
        jobs.clear()
      }
      val end = System.currentTimeMillis()
      println("total time for " + num + " is " + (end - start))
      // BUG FIX: avoid ArithmeticException (divide by zero) when the
      // run completes in under one second.
      val duration = math.max(1L, (end - start) / 1000)
      println("it is " + num / duration + " jobs/second")
      SolrJDAO.commit
    }
  }
}
}
The result is amazing….
INFO [pool-4-thread-2-ScalaTest-running-SolrJDAOSpec] 2015-08-07 16:04:10,009 SolrJDAO.scala (line 24) Init the SOLR Client ---------------
INFO [pool-4-thread-2-ScalaTest-running-SolrJDAOSpec] 2015-08-07 16:04:10,012 SolrJDAO.scala (line 26) SOLR URL = http://ubuntu-master:8983/solr/jobs
total time for 1000000 is 8502
it is 125000 jobs/second
3. Try to Build the Driver myself
Tips
Raise the open-file limit on Ubuntu and Mac OS:
sudo sh -c "ulimit -n 65535 && exec su carl"
sudo ulimit -n 10000