
Commit

Moved scrapy-ws script to extras/ and fixed broken methods due to changes in web service API

--HG--
rename : bin/scrapy-ws.py => extras/scrapy-ws.py
pablohoffman committed Aug 27, 2010
1 parent 648f700 commit e14cc2c
Showing 3 changed files with 13 additions and 19 deletions.
5 changes: 0 additions & 5 deletions debian/rules
@@ -3,8 +3,3 @@
 
 %:
 	dh $@
-
-override_dh_auto_install:
-	dh_auto_install
-	mkdir -p $(CURDIR)/debian/scrapy/usr/bin
-	mv $(CURDIR)/debian/tmp/usr/bin/scrapy-ws.py $(CURDIR)/debian/scrapy/usr/bin/scrapy-ws
1 change: 0 additions & 1 deletion debian/scrapy.lintian-overrides
@@ -1,3 +1,2 @@
 new-package-should-close-itp-bug
-binary-without-manpage usr/bin/scrapy-ws
 extra-license-file usr/share/pyshared/scrapy/xlib/pydispatch/license.txt
26 changes: 13 additions & 13 deletions bin/scrapy-ws.py → extras/scrapy-ws.py
@@ -1,8 +1,14 @@
 #!/usr/bin/env python
 """
-Example script to control and monitor Scrapy using its web service. It only
-provides a reduced functionality as its main purpose is to illustrate how to
-write a web service client. Feel free to improve or write you own.
+Example script to control a Scrapy server using its JSON-RPC web service.
+
+It only provides reduced functionality, as its main purpose is to illustrate
+how to write a web service client. Feel free to improve or write your own.
+
+Also, keep in mind that the JSON-RPC API is not stable. The recommended way to
+control a Scrapy server is through the execution queue (see the "queue"
+command).
 """
 
 import sys, optparse, urllib
@@ -19,7 +25,6 @@ def get_commands():
         'list-available': cmd_list_available,
         'list-running': cmd_list_running,
         'list-resources': cmd_list_resources,
-        'list-extensions': cmd_list_extensions,
         'get-global-stats': cmd_get_global_stats,
         'get-spider-stats': cmd_get_spider_stats,
     }
@@ -32,32 +37,27 @@ def cmd_help(args, opts):
 
 def cmd_run(args, opts):
     """run <spider_name> - schedule spider for running"""
-    jsonrpc_call(opts, 'manager/queue', 'append_spider_name', args[0])
+    jsonrpc_call(opts, 'crawler/queue', 'append_spider_name', args[0])
 
 def cmd_stop(args, opts):
     """stop <spider> - stop a running spider"""
-    jsonrpc_call(opts, 'manager/engine', 'close_spider', args[0])
+    jsonrpc_call(opts, 'crawler/engine', 'close_spider', args[0])
 
 def cmd_list_running(args, opts):
     """list-running - list running spiders"""
-    for x in json_get(opts, 'manager/engine/open_spiders'):
+    for x in json_get(opts, 'crawler/engine/open_spiders'):
         print x
 
 def cmd_list_available(args, opts):
     """list-available - list name of available spiders"""
-    for x in jsonrpc_call(opts, 'spiders', 'list'):
+    for x in jsonrpc_call(opts, 'crawler/spiders', 'list'):
         print x
 
 def cmd_list_resources(args, opts):
     """list-resources - list available web service resources"""
     for x in json_get(opts, '')['resources']:
         print x
 
-def cmd_list_extensions(args, opts):
-    """list-extensions - list enabled extensions"""
-    for x in jsonrpc_call(opts, 'extensions/enabled', 'keys'):
-        print x
-
 def cmd_get_spider_stats(args, opts):
     """get-spider-stats <spider> - get stats of a running spider"""
     stats = jsonrpc_call(opts, 'stats', 'get_stats', args[0])
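For context: jsonrpc_call and json_get are helpers defined earlier in scrapy-ws.py, outside the hunks shown above, which POST JSON-RPC requests to (or GET JSON documents from) the Scrapy web service. Below is a minimal standalone sketch of the same idea, targeting the renamed crawler/* resources. The helper name ws_call, the exact wire format, and the localhost:6080 default are illustrative assumptions, not the actual code from scrapy-ws.py:

    import json, urllib

    def ws_call(path, method, *args, **kwargs):
        # Hypothetical helper: POST a JSON-RPC request to a web service
        # resource such as 'crawler/queue' (one of the paths this commit
        # renames from manager/* to crawler/*).
        # The host/port defaults are assumptions; the web service address
        # is configurable in Scrapy settings.
        host = kwargs.get('host', 'localhost')
        port = kwargs.get('port', 6080)
        url = 'http://%s:%s/%s' % (host, port, path)
        payload = json.dumps({'method': method, 'params': args, 'id': 1})
        response = json.loads(urllib.urlopen(url, payload).read())
        if response.get('error'):
            raise RuntimeError(response['error'])
        return response['result']

    # Mirrors "scrapy-ws.py run <spider>" after this commit:
    # ws_call('crawler/queue', 'append_spider_name', 'somespider')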
