This repository was archived by the owner on Jul 19, 2018. It is now read-only.

Commit a04c42f

Some PY3 ports
1 parent 08445a3 · commit a04c42f

7 files changed: 14 additions & 9 deletions

requirements.txt

Lines changed: 1 addition & 0 deletions

@@ -1,3 +1,4 @@
+six>=1.5.2
 boto
 hubstorage
 python-dateutil
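
The six dependency added here backs the import changes in the modules below: six.moves.urllib.parse exposes quote, unquote, urljoin and urlunparse under a single name on both Python 2 and Python 3. A minimal sketch of that usage (not part of the commit):

from six.moves.urllib.parse import quote, unquote, urljoin, urlunparse

# On Python 2 these resolve to urllib/urlparse; on Python 3 to urllib.parse.
print(quote("a b"))                           # a%20b
print(unquote("a%20b"))                       # a b
print(urljoin("http://example.com/a/", "b"))  # http://example.com/a/b
print(urlunparse(("http", "example.com", "/a", "", "q=1", "")))  # http://example.com/a?q=1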

scrapylib/constraints/pipeline.py

Lines changed: 1 addition & 1 deletion

@@ -6,6 +6,6 @@ def process_item(self, item, spider):
         try:
             for c in item.constraints:
                 c(item)
-        except AssertionError, e:
+        except AssertionError as e:
             raise DropItem(str(e))
         return item
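
The removed except AssertionError, e: form is Python 2-only; except AssertionError as e: is accepted by Python 2.6+ and Python 3 alike, and the same change is applied in scrapylib/magicfields.py below. A small illustration of the ported syntax (not part of the commit):

try:
    assert False, "constraint failed"
except AssertionError as e:
    print(str(e))  # constraint failed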

scrapylib/magicfields.py

Lines changed: 2 additions & 2 deletions

@@ -78,7 +78,7 @@ def _extract_regex_group(regex, txt):
         try:
             compiled = re.compile(regex)
             _REGEXES[regex] = compiled
-        except Exception, e:
+        except Exception as e:
             errmessage = e.message
             _REGEX_ERRORS[regex] = errmessage
     if errmessage:
@@ -143,7 +143,7 @@ def _format(fmt, spider, response, item, fixed_values):
         if regex:
             try:
                 out = _extract_regex_group(regex, out)
-            except ValueError, e:
+            except ValueError as e:
                 spider.log("Error at '%s': %s" % (m.group(), e.message))

     return out
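
For context, the code around this hunk caches compiled patterns and compilation errors keyed by the regex string. A simplified, standalone sketch of that caching idea (an assumption based on the hunk, not the module's exact code):

import re

_REGEXES = {}        # regex string -> compiled pattern
_REGEX_ERRORS = {}   # regex string -> error message

def _compile_cached(regex):
    # Reuse a previously compiled pattern, and remember failures so a
    # bad pattern is only compiled (and reported) once.
    if regex in _REGEXES:
        return _REGEXES[regex]
    if regex in _REGEX_ERRORS:
        raise ValueError(_REGEX_ERRORS[regex])
    try:
        compiled = re.compile(regex)
        _REGEXES[regex] = compiled
        return compiled
    except Exception as e:
        _REGEX_ERRORS[regex] = str(e)
        raise ValueError(str(e))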

scrapylib/processors/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -2,7 +2,7 @@
 import locale as localelib
 import re
 import time
-from urlparse import urljoin
+from six.moves.urllib.parse import urljoin

 from scrapy.loader.processors import MapCompose, TakeFirst
 from scrapy.utils.markup import (remove_tags, replace_escape_chars,

scrapylib/proxy.py

Lines changed: 6 additions & 3 deletions

@@ -1,7 +1,10 @@
 import base64
-from urllib import unquote
-from urllib2 import _parse_proxy
-from urlparse import urlunparse
+from six.moves.urllib.parse import unquote
+try:
+    from urllib2 import _parse_proxy
+except ImportError:
+    from urllib.request import _parse_proxy
+from six.moves.urllib.parse import urlunparse


 class SelectiveProxyMiddleware(object):
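
_parse_proxy is a private stdlib helper that moved from urllib2 (Python 2) to urllib.request (Python 3), hence the guarded import above. A small sketch of how it splits a proxy URL (not part of the commit; the URL and credentials are placeholders):

try:
    from urllib2 import _parse_proxy          # Python 2
except ImportError:
    from urllib.request import _parse_proxy   # Python 3

# The helper returns a (scheme, user, password, hostport) tuple.
scheme, user, password, hostport = _parse_proxy("http://user:secret@proxy.example.com:8080")
print(scheme, user, password, hostport)  # http user secret proxy.example.com:8080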

scrapylib/querycleaner.py

Lines changed: 1 addition & 1 deletion

@@ -8,7 +8,7 @@
 Remove patterns has precedence.
 """
 import re
-from urllib import quote
+from six.moves.urllib.parse import quote

 from scrapy.utils.httpobj import urlparse_cached
 from scrapy.http import Request

tests/test_magicfields.py

Lines changed: 2 additions & 1 deletion

@@ -1,3 +1,4 @@
+from __future__ import print_function
 import re, os
 from unittest import TestCase

@@ -26,7 +27,7 @@ def setUp(self):
         self.spider = Spider('myspider', arg1='val1', start_urls = ["http://example.com"])

         def _log(x):
-            print x
+            print(x)

         self.spider.log = _log
         self.response = HtmlResponse(body="<html></html>", url="http://www.example.com/product/8798732")
