Refactored for Python3 compatibility
candale committed Jun 19, 2017
1 parent 15e0b70 commit 259162f
Showing 3 changed files with 41 additions and 23 deletions.
7 changes: 4 additions & 3 deletions scrapy_jsonrpc/jsonrpc.py
@@ -58,10 +58,11 @@ def jsonrpc_server_call(target, jsonrpc_request, json_decoder=None):
         json_decoder = ScrapyJSONDecoder()
 
     try:
-        req = json_decoder.decode(jsonrpc_request)
+        req = json_decoder.decode(jsonrpc_request.decode('utf-8'))
     except Exception as e:
-        return jsonrpc_error(None, jsonrpc_errors.PARSE_ERROR, 'Parse error',
-                             traceback.format_exc())
+        return jsonrpc_error(
+            None, jsonrpc_errors.PARSE_ERROR, 'Parse error',
+            traceback.format_exc())
 
     try:
         id, methname = req['id'], req['method']
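The jsonrpc.py change decodes the raw request body before handing it to the JSON decoder: on Python 3, Twisted delivers the POST body as bytes, while the decoder expects text. A minimal sketch of the same pattern, assuming only the standard-library json module (the helper name parse_jsonrpc_body is illustrative, not part of the package):

import json

def parse_jsonrpc_body(raw_body):
    # Twisted's request.content.getvalue() returns bytes on Python 3;
    # decode to text before parsing, mirroring the .decode('utf-8') added above.
    if isinstance(raw_body, bytes):
        raw_body = raw_body.decode('utf-8')
    return json.loads(raw_body)

# A body as the web service would receive it in a POST request:
print(parse_jsonrpc_body(b'{"jsonrpc": "2.0", "id": 1, "method": "stop"}'))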
13 changes: 9 additions & 4 deletions scrapy_jsonrpc/txweb.py
@@ -2,19 +2,24 @@
 
 from twisted.web import resource
 
-class JsonResource(resource.Resource):
+
+class JsonResource(resource.Resource, object):
 
     json_encoder = json.JSONEncoder()
 
+    def __init__(self):
+        super(JsonResource, self).__init__()
+
     def render(self, txrequest):
-        r = resource.Resource.render(self, txrequest)
+        r = super(JsonResource, self).render(txrequest)
         return self.render_object(r, txrequest)
 
     def render_object(self, obj, txrequest):
-        r = self.json_encoder.encode(obj) + "\n"
+        r = (self.json_encoder.encode(obj) + "\n").encode()
+
         txrequest.setHeader('Content-Type', 'application/json')
         txrequest.setHeader('Access-Control-Allow-Origin', '*')
         txrequest.setHeader('Access-Control-Allow-Methods', 'GET, POST, PATCH, PUT, DELETE')
-        txrequest.setHeader('Access-Control-Allow-Headers',' X-Requested-With')
+        txrequest.setHeader('Access-Control-Allow-Headers', 'X-Requested-With')
         txrequest.setHeader('Content-Length', len(r))
         return r
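The txweb.py changes follow two Python 2/3 patterns: mixing in object so that super() works on Python 2, where Twisted's resource.Resource is an old-style class, and encoding the rendered JSON to bytes, because twisted.web requires render() to return bytes on Python 3. A standalone sketch of the second point (the JsonEcho class and the port number are illustrative, not part of scrapy-jsonrpc):

import json

from twisted.internet import reactor
from twisted.web import resource, server


class JsonEcho(resource.Resource):
    # twisted.web rejects str return values from render() on Python 3,
    # so the JSON text is explicitly encoded before being returned.
    isLeaf = True

    def render_GET(self, request):
        payload = (json.dumps({'status': 'ok'}) + '\n').encode('utf-8')
        request.setHeader(b'Content-Type', b'application/json')
        request.setHeader(b'Content-Length', str(len(payload)).encode())
        return payload


if __name__ == '__main__':
    reactor.listenTCP(8080, server.Site(JsonEcho()))
    reactor.run()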
44 changes: 28 additions & 16 deletions scrapy_jsonrpc/webservice.py
@@ -1,4 +1,5 @@
 import logging
+import six
 
 from twisted.web import server, resource
 
@@ -17,27 +18,34 @@
 class JsonResource(JsonResource_):
 
     def __init__(self, crawler, target=None):
-        JsonResource_.__init__(self)
+        super(JsonResource, self).__init__()
+
         self.crawler = crawler
         self.json_encoder = ScrapyJSONEncoder(crawler=crawler)
 
+    def getChildWithDefault(self, path, request):
+        path = path.decode('UTF-8')
+        return super(JsonResource, self).getChildWithDefault(path, request)
+
+
 class JsonRpcResource(JsonResource):
 
     def __init__(self, crawler, target=None):
-        JsonResource.__init__(self, crawler, target)
+        super(JsonRpcResource, self).__init__(crawler, target)
+
         self.json_decoder = ScrapyJSONDecoder(crawler=crawler)
         self.crawler = crawler
         self._target = target
 
-    def render_GET(self, txrequest):
+    def render_GET(self, request):
         return self.get_target()
 
-    def render_POST(self, txrequest):
-        reqstr = txrequest.content.getvalue()
+    def render_POST(self, request):
+        reqstr = request.content.getvalue()
         target = self.get_target()
         return jsonrpc_server_call(target, reqstr, self.json_decoder)
 
-    def getChild(self, name, txrequest):
+    def getChild(self, name, request):
         target = self.get_target()
         try:
             newtarget = getattr(target, name)
@@ -54,33 +62,37 @@ class CrawlerResource(JsonRpcResource):
     ws_name = 'crawler'
 
     def __init__(self, crawler):
-        JsonRpcResource.__init__(self, crawler, crawler)
+        super(CrawlerResource, self).__init__(crawler, target=crawler)
 
 
 class RootResource(JsonResource):
 
-    def render_GET(self, txrequest):
-        return {'resources': self.children.keys()}
+    def render_GET(self, request):
+        return {'resources': list(self.children.keys())}
 
-    def getChild(self, name, txrequest):
+    def getChild(self, name, request):
         if name == '':
             return self
-        return JsonResource.getChild(self, name, txrequest)
+        return JsonResource.getChild(self, name, request)
 
 
-class WebService(server.Site):
+class WebService(server.Site, object):
 
     def __init__(self, crawler):
         if not crawler.settings.getbool('JSONRPC_ENABLED'):
             raise NotConfigured
-        self.crawler = crawler
+
         logfile = crawler.settings['JSONRPC_LOGFILE']
+        self.crawler = crawler
         self.portrange = [int(x) for x in crawler.settings.getlist('JSONRPC_PORT', [6023, 6073])]
         self.host = crawler.settings.get('JSONRPC_HOST', '127.0.0.1')
+        self.noisy = False
+
         root = RootResource(crawler)
         root.putChild('crawler', CrawlerResource(self.crawler))
-        server.Site.__init__(self, root, logPath=logfile)
-        self.noisy = False
+
+        super(WebService, self).__init__(root, logPath=logfile)
+
         crawler.signals.connect(self.start_listening, signals.engine_started)
         crawler.signals.connect(self.stop_listening, signals.engine_stopped)
 
@@ -90,10 +102,10 @@ def from_crawler(cls, crawler):
 
     def start_listening(self):
         self.port = listen_tcp(self.portrange, self.host, self)
+
         logger.debug(
             'Web service listening on {host.host:s}:{host.port:d}'.format(
                 host=self.port.getHost()))
 
     def stop_listening(self):
         self.port.stopListening()
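
The webservice.py changes combine several Python 2/3 patterns: cooperative super() calls (hence the extra object base, since Twisted's classes were old-style on Python 2), decoding URL path segments in getChildWithDefault() because twisted.web passes them as bytes on Python 3, and wrapping dict.keys() in list() because the Python 3 view object is not JSON-serialisable. A small sketch of the last two points, assuming only the standard library (both helper names are illustrative, not part of the package):

import json


def resource_listing(children):
    # On Python 3, dict.keys() returns a view that json.dumps() (and the
    # JSONEncoder subclasses built on it) refuses to serialise; list() makes
    # it a plain, serialisable list.
    return {'resources': list(children.keys())}


def normalise_path_segment(path):
    # twisted.web hands path segments to getChildWithDefault() as bytes on
    # Python 3; decode them before comparing against str route names.
    if isinstance(path, bytes):
        path = path.decode('utf-8')
    return path


print(json.dumps(resource_listing({'crawler': object()})))  # {"resources": ["crawler"]}
print(normalise_path_segment(b'crawler') == 'crawler')      # True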
