Skip to content
Closed
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
chap3 updated
  • Loading branch information
masamichiIto committed Aug 23, 2023
commit 7c01c06a193583130bcd97ae5ac66575d7bc6663
23 changes: 22 additions & 1 deletion chap3_work.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@
print(link.attrs['href'])

## p.35あたり
"""
# 停止条件が満たされにくいコードのため,どっかでctrl+Cで止める必要あり
import datetime
import random

Expand All @@ -23,4 +25,23 @@ def getLinks(articleUrl):
while len(links) > 0:
newArticle = links[random.randint(0, len(links)-1)].attrs['href']
print(newArticle)
links = getLinks(newArticle)
links = getLinks(newArticle)
"""

## 3-2
# The stopping condition is rarely satisfied, so this has to be
# interrupted with Ctrl+C at some point.
pages = set()
def getLinks(pageUrl):
    """Depth-first crawl of internal Wikipedia links, printing each new page.

    Fetches http://en.wikipedia.org{pageUrl}, scans it for '/wiki/...'
    hrefs, and immediately recurses into every link not seen before.
    Visited link paths accumulate in the module-level ``pages`` set.
    """
    global pages
    html = urlopen('http://en.wikipedia.org{}'.format(pageUrl))
    soup = BeautifulSoup(html, 'html.parser')
    for anchor in soup.find_all('a', href=re.compile('^(/wiki/)')):
        if 'href' not in anchor.attrs:
            continue
        candidate = anchor.attrs['href']
        if candidate in pages:
            continue
        # First time we see this page: report it, record it, crawl it.
        print(candidate)
        pages.add(candidate)
        getLinks(candidate)

getLinks('')