Commit d5a3c06

Updated netloc appropriately if url scheme is not passed
1 parent 79c1755 commit d5a3c06

File tree

2 files changed: 61 additions & 48 deletions


search_engine_parser/core/base.py

Lines changed: 11 additions & 7 deletions
@@ -26,8 +26,8 @@ class ReturnType(Enum):
 
 # All results returned are each items of search
 class SearchItem(dict):
-    """
-    SearchItem is a dict of results containing keys (titles, descriptions, links and other
+    """
+    SearchItem is a dict of results containing keys (titles, descriptions, links and other
     additional keys dependending on the engine)
     >>> result
     <search_engine_parser.core.base.SearchItem object at 0x7f907426a280>
@@ -48,7 +48,7 @@ def __getitem__(self, value):
 
 
 class SearchResult():
-    """
+    """
     The SearchResults after the searching
 
     >>> results = gsearch.search("preaching the choir", 1)
@@ -83,9 +83,9 @@ def keys(self):
         with suppress(IndexError):
             x = self.results[0]
             keys = x.keys()
-        return keys
+        return keys
 
-    def __len__(self):
+    def __len__(self):
         return len(self.results)
 
     def __repr_(self):
@@ -203,13 +203,17 @@ def get_search_url(self, query=None, page=None, **kwargs):
         # Some URLs use offsets
         offset = (page * 10) - 9
         params = self.get_params(
-            query=query, page=page, offset=offset, **kwargs)
+            query=query, page=page, offset=offset, **kwargs)
         url = urlparse(self.search_url)
         # For localization purposes, custom urls can be parsed for the same engine
         # such as google.de and google.com
         if kwargs.get("url"):
             new_url = urlparse(kwargs.pop("url"))
-            url._replace(netloc=new_url.netloc)
+            # When passing without scheme e.g google.de, url is parsed as path
+            if not new_url.netloc:
+                url = url._replace(netloc=new_url.path)
+            else:
+                url = url._replace(netloc=new_url.netloc)
         self._parsed_url = url._replace(query=urlencode(params))
 
         return self._parsed_url.geturl()
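The fix hinges on how `urllib.parse.urlparse` treats input without a scheme: only strings with a scheme (or a leading `//`) get a `netloc`, while a bare host such as `google.de` is parsed entirely into `path`. A quick illustration of that behaviour (hostnames are just examples):

```python
from urllib.parse import urlparse

# With a scheme the host lands in netloc, as the old code assumed.
print(urlparse("https://google.de"))
# ParseResult(scheme='https', netloc='google.de', path='', params='', query='', fragment='')

# Without a scheme the whole string is treated as a path and netloc is empty,
# which is why the new branch falls back to new_url.path.
print(urlparse("google.de"))
# ParseResult(scheme='', netloc='', path='google.de', params='', query='', fragment='')
```

Note that the removed line also discarded the result of `url._replace(...)`: `ParseResult` is an immutable namedtuple, so `_replace` returns a new value, and the updated code now assigns it back to `url`.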

search_engine_parser/core/cli.py

Lines changed: 50 additions & 41 deletions
@@ -8,10 +8,13 @@
 from datetime import datetime
 from importlib import import_module
 
+from blessed import Terminal
+from search_engine_parser import __version__
+from search_engine_parser.core.base import ReturnType
 from search_engine_parser.core.exceptions import NoResultsOrTrafficError
 
 
-def display(results, term, **args):
+def display(results, term, args):
     """ Displays search results
     """
     def print_one(kwargs):
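This signature change goes hand in hand with the parser change at the bottom of the file: `runner` no longer calls `vars()` on the parsed arguments, so `display` and `main` receive the `argparse.Namespace` itself and read options as attributes instead of dict keys. A small sketch of the difference (the option values are made up for illustration):

```python
from argparse import Namespace

# Roughly what parser.parse_args() hands back, with invented values.
args = Namespace(query="preaching the choir", rank=None, type="full")

# Old style: the Namespace was flattened with vars() and indexed like a dict.
print(vars(args)["query"])   # 'preaching the choir'

# New style: the Namespace is passed around as-is and read via attributes.
print(args.query)            # 'preaching the choir'
print(args.rank)             # None, so `if not args.rank:` prints every result
```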
@@ -31,50 +34,57 @@ def print_one(kwargs):
         print("\n")
 
 
-    if args.get('rank') and args["rank"] > 10:
+    if args.rank and args.rank > 10:
         sys.exit(
             "Results are only limited to 10, specify a different page number instead")
 
-    if not args.get('rank'):
-        len_results = 0
+    if not args.rank:
         for i in results:
             print_one(i)
     else:
-        rank = args["rank"]
+        rank = args.rank
         print_one(results[rank])
-
+
+
+def get_engine_class(engine):
+    """ Return the Engine Class """
+    try:
+        module = import_module(f"search_engine_parser.core.engines.{engine.lower()}")
+        return getattr(module, "Search")
+    except (ImportError, ModuleNotFoundError):
+        sys.exit('Engine < {} > does not exist'.format(engine))
+
+
+def show_summary(term, engine_class):
+    """ Show the summary of an Engine"""
+    print("\t{}".format(term.magenta(engine_class.name)))
+    print("\t-----------------------------------------------------")
+    print(engine_class.summary)
 
 
 def main(args):  # pylint: disable=too-many-branches
     """
     Executes logic from parsed arguments
     """
     term = Terminal()
-    engine = args['engine']
-    try:
-        module = import_module(f"search_engine_parser.core.engines.{engine.lower()}")
-        engine_class = getattr(module, "Search")
-    except (ImportError, ModuleNotFoundError):
-        sys.exit('Engine < {} > does not exist'.format(engine))
+    engine_class = get_engine_class(args.engine)
 
-    # check if in summary mode
-    if args.get("show"):
-        print("\t{}".format(term.magenta(engine_class.name)))
-        print("\t-----------------------------------------------------")
-        print(engine_class.summary)
-        sys.exit(0)
+    if args.show_summary:
+        show_summary(term, engine_class)
+        return
 
     # Initialize search Engine with required params
     engine = engine_class()
     try:
-        if args['clear_cache']:
+        if args.clear_cache:
             engine.clear_cache()
         # Display full details: Header, Link, Description
         start = datetime.now()
-        results = engine.search(args['query'], args['page'], return_type=ReturnType(args["type"]), url=args.get("url"))
+        results = engine.search(
+            args.query, args.page, return_type=ReturnType(args.type), url=args.url)
         duration = datetime.now() - start
-        display(results, term, type=args.get('type'), rank=args.get('rank'))
-        print("Total search took -> %s seconds" %(duration))
+        display(results, term, args)
+        print("Total search took -> %s seconds" %(duration))
     except NoResultsOrTrafficError as exc:
         print('\n', '{}'.format(term.red(str(exc))))
 
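The refactor above mainly extracts two helpers out of `main`. `get_engine_class` keeps the same `importlib` lookup as before: build the module path from the engine name, import it, and return that module's `Search` class. A rough sketch of how it behaves, assuming the helper is importable from `search_engine_parser.core.cli` and that the stock `google` engine module ships with the package:

```python
from search_engine_parser.core.cli import get_engine_class

# The engine name is lower-cased before the import, so case does not matter here.
engine_class = get_engine_class("Google")
print(engine_class)
# Expected: the Search class from search_engine_parser.core.engines.google

# An unknown engine exits the CLI with a readable message instead of a traceback.
get_engine_class("notanengine")
# SystemExit: Engine < notanengine > does not exist
```

Splitting `show_summary` out the same way lets the summary path `return` from `main` instead of calling `sys.exit(0)`.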
@@ -86,57 +96,56 @@ def runner():
     parser = argparse.ArgumentParser(description='SearchEngineParser', prog="pysearch")
 
     parser.add_argument('-V', '--version', action="version", version="%(prog)s v" + __version__)
+
     parser.add_argument(
         '-e', '--engine',
         help='Engine to use for parsing the query e.g google, yahoo, bing,'
         'duckduckgo (default: google)',
         default='google')
-    # add subparsers for summary mode and search mode
-    subparsers = parser.add_subparsers(help='help for subcommands')
 
-    parser_search = subparsers.add_parser('search', help='search help')
+    parser.add_argument(
+        '--show-summary',
+        action='store_true',
+        help='Shows the summary of an engine')
 
-    parser_search.add_argument(
+    parser.add_argument(
         '-u',
         '--url',
         help='A custom link to use as base url for search e.g google.de')
 
-    parser_search.add_argument(
+    parser.add_argument(
         '-q',
         '--query',
         help='Query string to search engine for',
         required=True)
-    parser_search.add_argument(
+    parser.add_argument(
         '-p',
         '--page',
         type=int,
         help='Page of the result to return details for (default: 1)',
         default=1)
-    parser_search.add_argument(
+    parser.add_argument(
         '-t', '--type',
         help='Type of detail to return i.e full, links, desciptions or titles (default: full)',
         default="full")
-    parser_search.add_argument(
-        '-cc', '--clear_cache',
+    parser.add_argument(
+        '-cc', '--clear-cache',
         action='store_true',
         help='Clear cache of engine before searching'
     )
-    parser_search.add_argument(
+    parser.add_argument(
         '-r',
         '--rank',
         type=int,
         help='ID of Detail to return e.g 5 (default: 0)')
 
-    parser_summary = subparsers.add_parser('summary', help='summary help')
-    parser_summary.add_argument(
-        '-s',
-        '--show',
-        type=int,
-        help='Show engine description (default: 1)',
-        default=1)
 
-    args = vars(parser.parse_args())
-    main(args)
+    args = parser.parse_args()
+    # If subcommand has associated function, run the function else call main
+    try:
+        args.func(args)
+    except AttributeError:
+        main(args)
 
 
 if __name__ == '__main__':