From b5ccd69784af564bd05a66ce925758a04da1e693 Mon Sep 17 00:00:00 2001
From: Stewart McKee
Date: Tue, 26 Nov 2013 23:27:20 +0000
Subject: [PATCH] updated worker spec

Raise a clear error when Sidekiq is already running instead of
asserting that the process list is empty, turn on debug output in the
crawl requests, drop a stray `ap` call, loosen the crawl limit
assertion so it only fails above 100 pages, and give the queues more
time to clear between specs.
---
 spec/cobweb/crawl_worker_spec.rb | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git a/spec/cobweb/crawl_worker_spec.rb b/spec/cobweb/crawl_worker_spec.rb
index 851d0a7..92ed338 100644
--- a/spec/cobweb/crawl_worker_spec.rb
+++ b/spec/cobweb/crawl_worker_spec.rb
@@ -8,8 +8,8 @@ if SIDEKIQ_INSTALLED
 
     #store all existing resque process ids so we don't kill them afterwards
     @existing_processes = `ps aux | grep sidekiq | grep -v grep | awk '{print $2}'`.split("\n")
-    puts @existing_processes
-    @existing_processes.should be_empty
+
+    raise "Sidekiq is already running, please stop before running specs." if @existing_processes.count > 0
 
     # START WORKERS ONLY FOR CRAWL QUEUE SO WE CAN COUNT ENQUEUED PROCESS AND FINISH QUEUES
     puts "Starting Workers... Please Wait..."
@@ -34,7 +34,7 @@
       :crawl_id => Digest::SHA1.hexdigest("#{Time.now.to_i}.#{Time.now.usec}"),
       :crawl_limit => nil,
       :quiet => false,
-      :debug => false,
+      :debug => true,
       :cache => nil,
       :queue_system => :sidekiq
     }
@@ -60,6 +60,7 @@
       :crawl_id => Digest::SHA1.hexdigest("#{Time.now.to_i}.#{Time.now.usec}"),
       :quiet => true,
       :cache => nil,
+      :debug => true,
       :queue_system => :sidekiq,
       :valid_mime_types => ["text/html"]
     }
@@ -87,6 +88,7 @@
     @request = {
       :crawl_id => Digest::SHA1.hexdigest("#{Time.now.to_i}.#{Time.now.usec}"),
       :quiet => true,
+      :debug => true,
       :queue_system => :sidekiq,
       :cache => nil
     }
@@ -136,7 +138,6 @@
       wait_for_crawl_finished crawl[:crawl_id]
       mime_types = CrawlProcessWorker.queue_items(0, 200).map{|job| JSON.parse(job)["args"][0]["mime_type"]}
-      ap mime_types
       mime_types.select{|m| m=="text/html"}.count.should == 5
     end
   end
 
@@ -186,11 +187,11 @@
       wait_for_crawl_finished crawl[:crawl_id]
       CrawlFinishedWorker.queue_size.should == 1
     end
-    it "should not crawl 100 pages" do
+    it "should not crawl more than 100 pages" do
       crawl = @cobweb.start(@base_url)
       @stat = Stats.new({:crawl_id => crawl[:crawl_id]})
       wait_for_crawl_finished crawl[:crawl_id]
-      CrawlProcessWorker.queue_size.should_not == 100
+      CrawlProcessWorker.queue_size.should_not > 100
     end
   end
 end
@@ -244,7 +245,7 @@ def clear_sidekiq_queues
       conn.srem("queues", queue_name)
     end
   end
-  sleep 2
+  sleep 5
   CrawlProcessWorker.queue_size.should == 0
   CrawlFinishedWorker.queue_size.should == 0