Commit d91ec0e

Your code is too weak for PEP8. You lack DISCIPLINE
1 parent 39f9356 commit d91ec0e

File tree

12 files changed: +545 -530 lines changed


Chapter2_MorePyMC/daft_plot.py

Lines changed: 5 additions & 7 deletions
@@ -1,15 +1,14 @@
-#daft drawing for SMS example
+# daft drawing for SMS example
 import matplotlib.pyplot as plt
 
 
-
 try:
     import daft
 except ImportError:
     print "python library Daft required."
-
 
-pgm = daft.PGM([9, 4], origin=[.5,.5])
+
+pgm = daft.PGM([9, 4], origin=[.5, .5])
 pgm.add_node(daft.Node("tau", r"$\tau$", 4.0, 3.5))
 pgm.add_node(daft.Node("alpha", r"$\alpha$", 6, 4.0))
 pgm.add_node(daft.Node("lambda1", r"$\lambda_1$", 5.5, 3.2,))
@@ -18,7 +17,6 @@
 pgm.add_node(daft.Node("obs", "obs", 5.0, 1.0, 1.2, observed=True))
 
 
-
 pgm.add_edge("tau", "lambda")
 pgm.add_edge("alpha", "lambda1")
 pgm.add_edge("alpha", "lambda2")
@@ -27,5 +25,5 @@
 
 pgm.add_edge("lambda", "obs")
 pgm.render()
-plt.figure( figsize=(12,5) )
-plt.show()
+plt.figure(figsize=(12, 5))
+plt.show()
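
Aside (not part of the commit): the daft calls in this file follow a small, regular API, so a minimal sketch of the same pattern may help readers who have not used the library. Everything below is illustrative and assumes the same older daft and matplotlib versions the script targets.

# Minimal daft sketch (illustrative only): build a tiny PGM, then render it.
import matplotlib.pyplot as plt
import daft

pgm = daft.PGM([4, 3], origin=[0, 0])                       # canvas size and origin, in node units
pgm.add_node(daft.Node("a", r"$a$", 1, 2))                  # an unobserved node at (x=1, y=2)
pgm.add_node(daft.Node("x", r"$x$", 2, 1, observed=True))   # shaded observed node
pgm.add_edge("a", "x")                                      # arrow from "a" to "x"
pgm.render()                                                # draws onto daft's own figure
plt.show()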

Chapter2_MorePyMC/separation_plot.py

Lines changed: 23 additions & 28 deletions
@@ -7,49 +7,44 @@
 import numpy as np
 
 
-
-def separation_plot( p, y, **kwargs ):
+def separation_plot(p, y, **kwargs):
     """
     This function creates a separation plot for logistic and probit classification.
     See http://mdwardlab.com/sites/default/files/GreenhillWardSacks.pdf
-
+
     p: The proportions/probabilities, can be a nxM matrix which represents M models.
     y: the 0-1 response variables.
-
-    """
+
+    """
     assert p.shape[0] == y.shape[0], "p.shape[0] != y.shape[0]"
     n = p.shape[0]
 
     try:
         M = p.shape[1]
     except:
-        p = p.reshape( n, 1 )
+        p = p.reshape(n, 1)
         M = p.shape[1]
 
-    #colors = np.array( ["#fdf2db", "#e44a32"] )
-    colors_bmh = np.array( ["#eeeeee", "#348ABD"] )
+    # colors = np.array( ["#fdf2db", "#e44a32"] )
+    colors_bmh = np.array(["#eeeeee", "#348ABD"])
 
+    fig = plt.figure()  # figsize = (8, 1.3*M) )
 
-    fig = plt.figure( )#figsize = (8, 1.3*M) )
-
     for i in range(M):
-        ax = fig.add_subplot(M, 1, i+1)
-        ix = np.argsort( p[:,i] )
-        #plot the different bars
-        bars = ax.bar( np.arange(n), np.ones(n), width=1.,
-            color = colors_bmh[ y[ix].astype(int) ],
-            edgecolor = 'none')
-        ax.plot( np.arange(n), p[ix,i], "k",
-            linewidth = 1.,drawstyle="steps-post" )
-        #create expected value bar.
-        ax.vlines( [(1-p[ix,i]).sum()], [0], [1] )
-        #ax.grid(False)
-        #ax.axis('off')
-        plt.xlim( 0, n-1)
-
+        ax = fig.add_subplot(M, 1, i + 1)
+        ix = np.argsort(p[:, i])
+        # plot the different bars
+        bars = ax.bar(np.arange(n), np.ones(n), width=1.,
+                      color=colors_bmh[y[ix].astype(int)],
+                      edgecolor='none')
+        ax.plot(np.arange(n), p[ix, i], "k",
+                linewidth=1., drawstyle="steps-post")
+        # create expected value bar.
+        ax.vlines([(1 - p[ix, i]).sum()], [0], [1])
+        # ax.grid(False)
+        # ax.axis('off')
+        plt.xlim(0, n - 1)
+
     plt.tight_layout()
-
-    return
-
 
-
+    return
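
The docstring above spells out the interface: p can be an n x M matrix of predicted probabilities, one column per model, and y holds the 0-1 outcomes. A hedged usage sketch, not from the repository (the import path and the synthetic data are assumptions):

# Illustrative only: feed synthetic probabilities and 0-1 outcomes to separation_plot.
import numpy as np
import matplotlib.pyplot as plt
from separation_plot import separation_plot  # assumes this module is on the path

n = 200
p = np.random.uniform(0, 1, size=(n, 2))   # two "models" worth of predicted probabilities
y = np.random.binomial(1, p[:, 0])         # outcomes loosely tied to the first model
separation_plot(p, y)                      # one panel per column of p
plt.show()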

Chapter3_MCMC/github_pull.py

Lines changed: 40 additions & 43 deletions
@@ -1,4 +1,4 @@
-#github data scrapper
+# github data scrapper
 
 """
 variables of interest:
@@ -21,69 +21,66 @@
 from requests import get
 
 
-
 MAX = 8000000
-today =  datetime.datetime.today()
+today = datetime.datetime.today()
 randint = np.random.randint
-N = 120 #sample size.
-auth = ("username", "password" )
+N = 120  # sample size.
+auth = ("username", "password")
 
-language_mappings = {"Python": 0, "JavaScript": 1, "Ruby": 2, "Java":3, "Shell":4, "PHP":5}
+language_mappings = {
+    "Python": 0, "JavaScript": 1, "Ruby": 2, "Java": 3, "Shell": 4, "PHP": 5}
 
-#define data matrix:
-X = np.zeros( (N , 12), dtype = int )
+# define data matrix:
+X = np.zeros((N, 12), dtype=int)
 
 for i in xrange(N):
     is_fork = True
     is_valid_language = False
-
+
     while is_fork == True or is_valid_language == False:
         is_fork = True
         is_valid_language = False
-
-        params = {"since":randint(0, MAX ) }
-        r = get("https://api.github.com/repositories", params = params, auth=auth )
-        results = loads( r.text )[0]
-        #im only interested in the first one, and if it is not a fork.
+
+        params = {"since": randint(0, MAX)}
+        r = get(
+            "https://api.github.com/repositories", params=params, auth=auth)
+        results = loads(r.text)[0]
+        # im only interested in the first one, and if it is not a fork.
         is_fork = results["fork"]
-
-        r = get( results["url"], auth = auth)
-
-        #check the language
-        repo_results = loads( r.text )
-        try:
-            language_mappings[ repo_results["language" ] ]
+
+        r = get(results["url"], auth=auth)
+
+        # check the language
+        repo_results = loads(r.text)
+        try:
+            language_mappings[repo_results["language"]]
             is_valid_language = True
         except:
            pass
-
-
 
-    #languages
-    X[ i, language_mappings[ repo_results["language" ] ] ] = 1
-
-    #delta time
-    X[ i, 6] = ( today - datetime.datetime.strptime( repo_results["created_at"][:10], "%Y-%m-%d" ) ).days
-
-    #haswiki
+    # languages
+    X[i, language_mappings[repo_results["language"]]] = 1
+
+    # delta time
+    X[i, 6] = (
+        today - datetime.datetime.strptime(repo_results["created_at"][:10], "%Y-%m-%d")).days
+
+    # haswiki
     X[i, 7] = repo_results["has_wiki"]
-
-    #get user information
-    r = get( results["owner"]["url"] , auth = auth)
-    user_results = loads( r.text )
+
+    # get user information
+    r = get(results["owner"]["url"], auth=auth)
+    user_results = loads(r.text)
     X[i, 8] = user_results["following"]
     X[i, 9] = user_results["followers"]
-
-    #get dep. data
+
+    # get dep. data
     X[i, 10] = repo_results["watchers_count"]
    X[i, 11] = repo_results["forks_count"]
-    print
+    print
     print " -------------- "
-    print i, ": ", results["full_name"], repo_results["language" ], repo_results["watchers_count"], repo_results["forks_count"]
+    print i, ": ", results["full_name"], repo_results["language"], repo_results["watchers_count"], repo_results["forks_count"]
     print " -------------- "
-    print
-
-np.savetxt("data/github_data.csv", X, delimiter=",", fmt="%d" )
-
-
+    print
 
+np.savetxt("data/github_data.csv", X, delimiter=",", fmt="%d")
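
For reference, the matrix this script saves has one row per sampled repository and twelve integer columns: columns 0-5 are the one-hot language indicators from language_mappings, column 6 is the repository age in days, column 7 is has_wiki, columns 8-9 are the owner's following and followers counts, and columns 10-11 are watchers and forks. A hedged sketch, not part of the commit, of reading the CSV back:

# Illustrative only: load the matrix written by github_pull.py and unpack its columns.
import numpy as np

X = np.loadtxt("data/github_data.csv", delimiter=",", dtype=int)
languages = X[:, :6]            # one-hot: Python, JavaScript, Ruby, Java, Shell, PHP
age_days = X[:, 6]              # days since the repository was created
has_wiki = X[:, 7]
following, followers = X[:, 8], X[:, 9]
watchers, forks = X[:, 10], X[:, 11]
print(X.shape)                  # (120, 12) for the N = 120 sample above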

Chapter4_TheGreatestTheoremNeverTold/top_pic_comments.py

Lines changed: 13 additions & 33 deletions
@@ -7,20 +7,20 @@
 
 
 reddit = praw.Reddit("BayesianMethodsForHackers")
-subreddit = reddit.get_subreddit( "pics" )
+subreddit = reddit.get_subreddit("pics")
 
 top_submissions = subreddit.get_top()
 
 
-n_pic = int( sys.argv[1] ) if sys.argv[1] else 1
+n_pic = int(sys.argv[1]) if sys.argv[1] else 1
 
 i = 0
 while i < n_pic:
     top_submission = top_submissions.next()
     while "i.imgur.com" not in top_submission.url:
-        #make sure it is linking to an image, not a webpage.
+        # make sure it is linking to an image, not a webpage.
         top_submission = top_submissions.next()
-    i+=1
+    i += 1
 
 print "Title of submission: \n", top_submission.title
 top_post_url = top_submission.url
@@ -31,33 +31,13 @@
 downvotes = []
 contents = []
 _all_comments = top_submission.comments
-all_comments=[]
+all_comments = []
 for comment in _all_comments:
-    try:
-        upvotes.append( comment.ups )
-        downvotes.append( comment.downs )
-        contents.append( comment.body )
-    except Exception as e:
-        continue
-
-votes = np.array( [ upvotes, downvotes] ).T
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        upvotes.append(comment.ups)
+        downvotes.append(comment.downs)
+        contents.append(comment.body)
+    except Exception as e:
+        continue
+
+votes = np.array([upvotes, downvotes]).T
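
After the loop, votes is an (n_comments, 2) integer array with upvotes in the first column and downvotes in the second. A hedged follow-on sketch, not from the repository, of one simple summary a reader might compute from it:

# Illustrative only: per-comment upvote ratio from a stand-in votes array.
import numpy as np

votes = np.array([[10, 2], [55, 7], [3, 1]])                   # placeholder for the scraped data
upvote_ratio = votes[:, 0] / votes.sum(axis=1).astype(float)   # float division (the script is Python 2)
print(upvote_ratio)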
