27 changes: 14 additions & 13 deletions scrapy_jsonrpc/jsonrpc.py
@@ -7,7 +7,7 @@
import traceback
from six.moves import urllib

from scrapy.utils.python import unicode_to_str
from scrapy.utils.python import to_bytes
from scrapy_jsonrpc.serialize import ScrapyJSONDecoder


@@ -37,7 +37,7 @@ def jsonrpc_client_call(url, method, *args, **kwargs):
if args and kwargs:
raise ValueError("Pass *args or **kwargs but not both to jsonrpc_client_call")
req = {'jsonrpc': '2.0', 'method': method, 'params': args or kwargs, 'id': 1}
data = unicode_to_str(json.dumps(req))
data = to_bytes(json.dumps(req))
body = urllib.request.urlopen(url, data).read()
res = json.loads(body.decode('utf-8'))
if 'result' in res:
@@ -58,32 +58,33 @@ def jsonrpc_server_call(target, jsonrpc_request, json_decoder=None):
json_decoder = ScrapyJSONDecoder()

try:
req = json_decoder.decode(jsonrpc_request)
req = json_decoder.decode(jsonrpc_request.decode('utf-8'))
except Exception as e:
return jsonrpc_error(None, jsonrpc_errors.PARSE_ERROR, 'Parse error',
traceback.format_exc())
return jsonrpc_error(
None, jsonrpc_errors.PARSE_ERROR, 'Parse error',
traceback.format_exc())

try:
id, methname = req['id'], req['method']
request_id, methname = req['id'], req['method']
except KeyError:
return jsonrpc_error(None, jsonrpc_errors.INVALID_REQUEST, 'Invalid Request')

try:
method = getattr(target, methname)
except AttributeError:
return jsonrpc_error(id, jsonrpc_errors.METHOD_NOT_FOUND, 'Method not found')
return jsonrpc_error(request_id, jsonrpc_errors.METHOD_NOT_FOUND, 'Method not found')

params = req.get('params', [])
a, kw = ([], params) if isinstance(params, dict) else (params, {})
kw = dict([(str(k), v) for k, v in kw.items()]) # convert kw keys to str
try:
return jsonrpc_result(id, method(*a, **kw))
return jsonrpc_result(request_id, method(*a, **kw))
except Exception as e:
return jsonrpc_error(id, jsonrpc_errors.INTERNAL_ERROR, str(e), \
return jsonrpc_error(request_id, jsonrpc_errors.INTERNAL_ERROR, str(e), \
traceback.format_exc())


def jsonrpc_error(id, code, message, data=None):
def jsonrpc_error(request_id, code, message, data=None):
"""Create JSON-RPC error response"""
return {
'jsonrpc': '2.0',
@@ -92,14 +93,14 @@ def jsonrpc_error(id, code, message, data=None):
'message': message,
'data': data,
},
'id': id,
'id': request_id,
}


def jsonrpc_result(id, result):
def jsonrpc_result(request_id, result):
"""Create JSON-RPC result response"""
return {
'jsonrpc': '2.0',
'result': result,
'id': id,
'id': request_id,
}
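
Note on scrapy_jsonrpc/jsonrpc.py: under Python 3 the request body arriving from the transport is bytes, so jsonrpc_server_call now decodes it itself and the client encodes outgoing requests with to_bytes. A minimal sketch of how the server-side entry point might be called after this change; EchoTarget and the literal values are illustrative, not part of the diff:

    from scrapy_jsonrpc.jsonrpc import jsonrpc_server_call

    class EchoTarget(object):
        def echo(self, *args, **kwargs):
            # Echo positional and keyword params back to the caller.
            return [args, kwargs]

    # The request is passed as bytes; with no decoder argument the default
    # ScrapyJSONDecoder() is used.
    response = jsonrpc_server_call(
        EchoTarget(),
        b'{"jsonrpc": "2.0", "method": "echo", "params": [1, 2], "id": 7}')
    # response is a plain dict, e.g. {'jsonrpc': '2.0', 'result': [(1, 2), {}], 'id': 7}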
13 changes: 9 additions & 4 deletions scrapy_jsonrpc/txweb.py
@@ -2,19 +2,24 @@

from twisted.web import resource

class JsonResource(resource.Resource):

class JsonResource(resource.Resource, object):

json_encoder = json.JSONEncoder()

def __init__(self):
super(JsonResource, self).__init__()

def render(self, txrequest):
r = resource.Resource.render(self, txrequest)
r = super(JsonResource, self).render(txrequest)
return self.render_object(r, txrequest)

def render_object(self, obj, txrequest):
r = self.json_encoder.encode(obj) + "\n"
r = (self.json_encoder.encode(obj) + "\n").encode()

txrequest.setHeader('Content-Type', 'application/json')
txrequest.setHeader('Access-Control-Allow-Origin', '*')
txrequest.setHeader('Access-Control-Allow-Methods', 'GET, POST, PATCH, PUT, DELETE')
txrequest.setHeader('Access-Control-Allow-Headers',' X-Requested-With')
txrequest.setHeader('Access-Control-Allow-Headers', 'X-Requested-With')
txrequest.setHeader('Content-Length', len(r))
return r
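
Note on scrapy_jsonrpc/txweb.py: twisted.web on Python 3 expects render() to return bytes, so render_object() now encodes the JSON payload to UTF-8 and Content-Length is taken from the encoded body. A hypothetical subclass (PingResource is made up for illustration) showing the contract a resource built on this class would follow:

    from scrapy_jsonrpc.txweb import JsonResource

    class PingResource(JsonResource):
        isLeaf = True

        def render_GET(self, txrequest):
            # render() passes this return value to render_object(), which
            # JSON-encodes it, sets the JSON/CORS headers and returns bytes.
            return {'ping': 'pong'}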
63 changes: 48 additions & 15 deletions scrapy_jsonrpc/webservice.py
@@ -1,6 +1,8 @@
import logging
import six

from twisted.web import server, resource
from twisted.python.compat import _PY3, nativeString

from scrapy.exceptions import NotConfigured
from scrapy import signals
@@ -17,27 +19,34 @@
class JsonResource(JsonResource_):

def __init__(self, crawler, target=None):
JsonResource_.__init__(self)
super(JsonResource, self).__init__()

self.crawler = crawler
self.json_encoder = ScrapyJSONEncoder(crawler=crawler)

def getChildWithDefault(self, path, request):
path = path.decode('UTF-8')
return super(JsonResource, self).getChildWithDefault(path, request)


class JsonRpcResource(JsonResource):

def __init__(self, crawler, target=None):
JsonResource.__init__(self, crawler, target)
super(JsonRpcResource, self).__init__(crawler, target)

self.json_decoder = ScrapyJSONDecoder(crawler=crawler)
self.crawler = crawler
self._target = target

def render_GET(self, txrequest):
def render_GET(self, request):
return self.get_target()

def render_POST(self, txrequest):
reqstr = txrequest.content.getvalue()
def render_POST(self, request):
reqstr = request.content.getvalue()
target = self.get_target()
return jsonrpc_server_call(target, reqstr, self.json_decoder)

def getChild(self, name, txrequest):
def getChild(self, name, request):
target = self.get_target()
try:
newtarget = getattr(target, name)
@@ -54,33 +63,37 @@ class CrawlerResource(JsonRpcResource):
ws_name = 'crawler'

def __init__(self, crawler):
JsonRpcResource.__init__(self, crawler, crawler)
super(CrawlerResource, self).__init__(crawler, target=crawler)


class RootResource(JsonResource):

def render_GET(self, txrequest):
return {'resources': self.children.keys()}
def render_GET(self, request):
return {'resources': list(self.children.keys())}

def getChild(self, name, txrequest):
def getChild(self, name, request):
if name == '':
return self
return JsonResource.getChild(self, name, txrequest)
return JsonResource.getChild(self, name, request)


class WebService(server.Site):
class WebService(server.Site, object):

def __init__(self, crawler):
if not crawler.settings.getbool('JSONRPC_ENABLED'):
raise NotConfigured
self.crawler = crawler

logfile = crawler.settings['JSONRPC_LOGFILE']
self.crawler = crawler
self.portrange = [int(x) for x in crawler.settings.getlist('JSONRPC_PORT', [6023, 6073])]
self.host = crawler.settings.get('JSONRPC_HOST', '127.0.0.1')
self.noisy = False

root = RootResource(crawler)
root.putChild('crawler', CrawlerResource(self.crawler))
server.Site.__init__(self, root, logPath=logfile)
self.noisy = False

super(WebService, self).__init__(root, logPath=logfile)

crawler.signals.connect(self.start_listening, signals.engine_started)
crawler.signals.connect(self.stop_listening, signals.engine_stopped)

@@ -90,10 +103,30 @@ def from_crawler(cls, crawler):

def start_listening(self):
self.port = listen_tcp(self.portrange, self.host, self)

logger.debug(
'Web service listening on {host.host:s}:{host.port:d}'.format(
host=self.port.getHost()))

def stop_listening(self):
self.port.stopListening()

def log(self, request):
"""
Write a line representing C{request} to the access log file.

@param request: The request object about which to log.
@type request: L{Request}
"""
try:
logFile = self.logFile
except AttributeError:
pass
else:
line = self._logFormatter(self._logDateTime, request) + u"\n"
if self._nativeize:
line = nativeString(line)
# If we're on Python3, we don't need to encode it
elif _PY3 is False:
line = line.encode("utf-8")
logFile.write(line)
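
Note on scrapy_jsonrpc/webservice.py: the WebService extension configures itself from the crawler settings read in __init__ (JSONRPC_ENABLED, JSONRPC_LOGFILE, JSONRPC_PORT, JSONRPC_HOST) and starts listening when the engine starts. A hypothetical settings.py snippet; the extension path and priority are assumptions for illustration, only the setting names come from the code above:

    # settings.py (illustrative values)
    EXTENSIONS = {
        'scrapy_jsonrpc.webservice.WebService': 500,
    }
    JSONRPC_ENABLED = True
    JSONRPC_LOGFILE = 'jsonrpc.log'
    JSONRPC_PORT = [6023, 6073]   # listen_tcp() tries ports in this range
    JSONRPC_HOST = '127.0.0.1'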
23 changes: 11 additions & 12 deletions tests/test_jsonrpc.py
@@ -7,7 +7,7 @@
from scrapy_jsonrpc.jsonrpc import jsonrpc_client_call, jsonrpc_server_call, \
JsonRpcError, jsonrpc_errors
from scrapy_jsonrpc.serialize import ScrapyJSONDecoder
from scrapy.utils.python import unicode_to_str, str_to_unicode
from scrapy.utils.python import to_bytes, to_unicode
from tests.test_serialize import CrawlerMock


@@ -17,8 +17,7 @@ def _umock(result=None, error=None):
response.update(result=result)
if error is not None:
response.update(error=error)
return BytesIO(unicode_to_str(json.dumps(response)))

return BytesIO(to_bytes(json.dumps(response)))


class TestTarget(object):
@@ -48,7 +47,7 @@ def _urlopen(url, data):

with patch.object(urllib.request, 'urlopen', _urlopen):
jsonrpc_client_call('url', 'test', 'one', 2)
req = json.loads(str_to_unicode(sentcall['data']))
req = json.loads(to_unicode(sentcall['data']))
assert 'id' in req
self.assertEqual(sentcall['url'], 'url')
self.assertEqual(req['jsonrpc'], '2.0')
@@ -61,7 +60,7 @@ def test_jsonrpc_client_call_response(self, urlopen_mock):
# must return result or error
self.assertRaises(ValueError, jsonrpc_client_call, 'url', 'test')
urlopen_mock.return_value = _umock(result={'one': 1})
self.assertEquals(jsonrpc_client_call('url', 'test'), {'one': 1})
self.assertEqual(jsonrpc_client_call('url', 'test'), {'one': 1})
urlopen_mock.return_value = _umock(error={'code': 123,
'message': 'hello',
'data': 'some data'})
@@ -80,47 +79,47 @@ def test_jsonrpc_client_call_response(self, urlopen_mock):

def test_jsonrpc_server_call(self):
t = TestTarget()
r = jsonrpc_server_call(t, u'invalid json data', self.json_decoder)
r = jsonrpc_server_call(t, b'invalid json data', self.json_decoder)
assert 'error' in r
assert r['jsonrpc'] == '2.0'
assert r['id'] is None
self.assertEqual(r['error']['code'], jsonrpc_errors.PARSE_ERROR)
assert 'Traceback' in r['error']['data']

r = jsonrpc_server_call(t, u'{"test": "test"}', self.json_decoder)
r = jsonrpc_server_call(t, b'{"test": "test"}', self.json_decoder)
assert 'error' in r
assert r['jsonrpc'] == '2.0'
assert r['id'] is None
self.assertEqual(r['error']['code'], jsonrpc_errors.INVALID_REQUEST)

r = jsonrpc_server_call(t, u'{"method": "notfound", "id": 1}', self.json_decoder)
r = jsonrpc_server_call(t, b'{"method": "notfound", "id": 1}', self.json_decoder)
assert 'error' in r
assert r['jsonrpc'] == '2.0'
assert r['id'] == 1
self.assertEqual(r['error']['code'], jsonrpc_errors.METHOD_NOT_FOUND)

r = jsonrpc_server_call(t, u'{"method": "exception", "id": 1}', self.json_decoder)
r = jsonrpc_server_call(t, b'{"method": "exception", "id": 1}', self.json_decoder)
assert 'error' in r
assert r['jsonrpc'] == '2.0'
assert r['id'] == 1
self.assertEqual(r['error']['code'], jsonrpc_errors.INTERNAL_ERROR)
assert 'testing-errors' in r['error']['message']
assert 'Traceback' in r['error']['data']

r = jsonrpc_server_call(t, u'{"method": "call", "id": 2}', self.json_decoder)
r = jsonrpc_server_call(t, b'{"method": "call", "id": 2}', self.json_decoder)
assert 'result' in r
assert r['jsonrpc'] == '2.0'
assert r['id'] == 2
self.assertEqual(r['result'], ([], {}))

r = jsonrpc_server_call(t, u'{"method": "call", "params": [456, 123], "id": 3}',
r = jsonrpc_server_call(t, b'{"method": "call", "params": [456, 123], "id": 3}',
self.json_decoder)
assert 'result' in r
assert r['jsonrpc'] == '2.0'
assert r['id'] == 3
self.assertEqual(r['result'], ([456, 123], {}))

r = jsonrpc_server_call(t, u'{"method": "call", "params": {"data": 789}, "id": 3}',
r = jsonrpc_server_call(t, b'{"method": "call", "params": {"data": 789}, "id": 3}',
self.json_decoder)
assert 'result' in r
assert r['jsonrpc'] == '2.0'
7 changes: 5 additions & 2 deletions tests/test_serialize.py
@@ -5,7 +5,7 @@

from twisted.internet import defer

from scrapy.spider import Spider
from scrapy.spiders import Spider
from scrapy.http import Request, Response
from scrapy_jsonrpc.serialize import SpiderReferencer, ScrapyJSONEncoder, ScrapyJSONDecoder

@@ -14,10 +14,12 @@ class _EngineMock(object):
def __init__(self, open_spiders):
self.open_spiders = open_spiders


class CrawlerMock(object):
def __init__(self, open_spiders):
self.engine = _EngineMock(open_spiders)


class BaseTestCase(unittest.TestCase):

def setUp(self):
@@ -29,6 +31,7 @@ def setUp(self):
self.encoder = ScrapyJSONEncoder(spref=self.spref)
self.decoder = ScrapyJSONDecoder(spref=self.spref)


class SpiderReferencerTestCase(BaseTestCase):

def test_spiders_and_references(self):
@@ -75,6 +78,7 @@ def test_encode_decode(self):
self.assertEqual(sr.encode_references(spiders), refs)
self.assertEqual(sr.decode_references(refs), spiders)


class JsonEncoderTestCase(BaseTestCase):

def test_encode_decode(self):
@@ -130,4 +134,3 @@ def test_encode_response(self):

if __name__ == "__main__":
unittest.main()

4 changes: 2 additions & 2 deletions tox.ini
@@ -4,12 +4,12 @@
# and then run "tox" from this directory.

[tox]
envlist = py27
envlist = py27, py36

[testenv]
deps =
mock
pytest
pytest-twisted
commands =
py.test --twisted {posargs:tests}
py.test {posargs:tests}