
updated worker spec
stewartmckee committed Nov 26, 2013
1 parent c50525c commit b5ccd69
Showing 1 changed file with 8 additions and 7 deletions.
spec/cobweb/crawl_worker_spec.rb (15 changes: 8 additions & 7 deletions)
@@ -8,8 +8,8 @@
     if SIDEKIQ_INSTALLED
       #store all existing resque process ids so we don't kill them afterwards
       @existing_processes = `ps aux | grep sidekiq | grep -v grep | awk '{print $2}'`.split("\n")
-      puts @existing_processes
-      @existing_processes.should be_empty
+
+      raise "Sidekiq is already running, please stop before running specs." if @existing_processes.count > 0
 
       # START WORKERS ONLY FOR CRAWL QUEUE SO WE CAN COUNT ENQUEUED PROCESS AND FINISH QUEUES
       puts "Starting Workers... Please Wait..."
@@ -34,7 +34,7 @@
       :crawl_id => Digest::SHA1.hexdigest("#{Time.now.to_i}.#{Time.now.usec}"),
       :crawl_limit => nil,
       :quiet => false,
-      :debug => false,
+      :debug => true,
       :cache => nil,
       :queue_system => :sidekiq
     }
@@ -60,6 +60,7 @@
       :crawl_id => Digest::SHA1.hexdigest("#{Time.now.to_i}.#{Time.now.usec}"),
       :quiet => true,
       :cache => nil,
+      :debug => true,
       :queue_system => :sidekiq,
       :valid_mime_types => ["text/html"]
     }
@@ -87,6 +88,7 @@
     @request = {
       :crawl_id => Digest::SHA1.hexdigest("#{Time.now.to_i}.#{Time.now.usec}"),
       :quiet => true,
+      :debug => true,
       :queue_system => :sidekiq,
       :cache => nil
     }
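This is the third request hash in the file to gain :debug => true. Purely as an illustration (this helper is hypothetical, not part of the spec), the shared defaults could be pulled into one place:

    require 'digest'

    # Hypothetical helper: shared request defaults for these specs,
    # overridable per example.
    def default_request(overrides = {})
      {
        :crawl_id     => Digest::SHA1.hexdigest("#{Time.now.to_i}.#{Time.now.usec}"),
        :quiet        => true,
        :debug        => true,
        :cache        => nil,
        :queue_system => :sidekiq
      }.merge(overrides)
    end

    # e.g. @request = default_request(:valid_mime_types => ["text/html"])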
@@ -136,7 +138,6 @@
       wait_for_crawl_finished crawl[:crawl_id]
 
       mime_types = CrawlProcessWorker.queue_items(0, 200).map{|job| JSON.parse(job)["args"][0]["mime_type"]}
-      ap mime_types
       mime_types.select{|m| m=="text/html"}.count.should == 5
     end
   end
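The removed line was debug output (ap is awesome_print's pretty-printer); the assertion itself is unchanged. For reference, a self-contained sketch of how the spec inspects enqueued jobs, assuming as above that CrawlProcessWorker.queue_items returns raw JSON job payloads:

    require 'json'

    # Each queued Sidekiq job is a JSON payload; the worker's first
    # argument carries the content hash, including its mime type.
    jobs = CrawlProcessWorker.queue_items(0, 200)
    mime_types = jobs.map { |job| JSON.parse(job)["args"][0]["mime_type"] }
    html_count = mime_types.count { |m| m == "text/html" }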
@@ -186,11 +187,11 @@
       wait_for_crawl_finished crawl[:crawl_id]
       CrawlFinishedWorker.queue_size.should == 1
     end
-    it "should not crawl 100 pages" do
+    it "should not crawl more than 100 pages" do
       crawl = @cobweb.start(@base_url)
       @stat = Stats.new({:crawl_id => crawl[:crawl_id]})
       wait_for_crawl_finished crawl[:crawl_id]
-      CrawlProcessWorker.queue_size.should_not == 100
+      CrawlProcessWorker.queue_size.should_not > 100
     end
   end
 end
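The rewritten example matches its new description: should_not == 100 would pass with 101 queued pages, whereas the intent of a crawl limit is an upper bound. A sketch of the same bound in RSpec's newer expect syntax:

    # Upper-bound assertion, expect-style (sketch):
    expect(CrawlProcessWorker.queue_size).to be <= 100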
@@ -244,7 +245,7 @@ def clear_sidekiq_queues
       conn.srem("queues", queue_name)
     end
   end
-  sleep 2
+  sleep 5
 
   CrawlProcessWorker.queue_size.should == 0
   CrawlFinishedWorker.queue_size.should == 0
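The longer sleep gives the Redis-backed queues more time to settle before the empty-queue assertions run. For context, a hedged reconstruction of a helper along the lines of clear_sidekiq_queues (the structure is illustrative; only the srem loop, the sleep, and the assertions appear in the hunk above):

    # Illustrative sketch: drop every known Sidekiq queue, then wait
    # for in-flight jobs to settle.
    def clear_sidekiq_queues
      Sidekiq.redis do |conn|
        conn.smembers("queues").each do |queue_name|
          conn.del("queue:#{queue_name}")
          conn.srem("queues", queue_name)
        end
      end
      sleep 5
    end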
