Skip to content

Commit 4d80013

Browse files
authored
添加proxy代理选项,这样可以使用代理进行fofa搜索,用于防止fofa账号被锁 (akkuman#7)
* 添加proxy代理选项,这样可以使用代理进行fofa搜索,用于防止fofa账号被锁 * remove vendor
1 parent bd6d442 commit 4d80013

File tree

2 files changed

+20
-6
lines changed

2 files changed

+20
-6
lines changed

cmd/rotateproxy/main.go

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ var (
1414
token string
1515
rule string
1616
pageCount int
17+
proxy string
1718
portPattern = regexp.MustCompile(`^\d+$`)
1819
)
1920

@@ -23,7 +24,8 @@ func init() {
2324
flag.StringVar(&baseCfg.Password, "pass", "", "authentication password")
2425
flag.StringVar(&email, "email", "", "email address")
2526
flag.StringVar(&token, "token", "", "token")
26-
flag.StringVar(&rule, "rule", `protocol=="socks5" && "Version:5 Method:No Authentication(0x00)" && after="2021-08-01" && country="CN"`, "search rule")
27+
flag.StringVar(&proxy, "proxy", "", "proxy")
28+
flag.StringVar(&rule, "rule", `protocol=="socks5" && "Version:5 Method:No Authentication(0x00)" && after="2022-02-01" && country="CN"`, "search rule")
2729
flag.IntVar(&baseCfg.IPRegionFlag, "region", 0, "0: all 1: cannot bypass gfw 2: bypass gfw")
2830
flag.IntVar(&baseCfg.SelectStrategy, "strategy", 3, "0: random, 1: Select the one with the shortest timeout, 2: Select the two with the shortest timeout, ...")
2931
flag.IntVar(&pageCount, "page", 5, "the page count you want to crawl")
@@ -52,7 +54,7 @@ func main() {
5254
baseCfg.ListenAddr = ":" + baseCfg.ListenAddr
5355
}
5456

55-
rotateproxy.StartRunCrawler(token, email, rule, pageCount)
57+
rotateproxy.StartRunCrawler(token, email, rule, pageCount, proxy)
5658
rotateproxy.StartCheckProxyAlive()
5759
c := rotateproxy.NewRedirectClient(rotateproxy.WithConfig(&baseCfg))
5860
c.Serve()

crawler.go

Lines changed: 16 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,12 @@
11
package rotateproxy
22

33
import (
4+
"crypto/tls"
45
"encoding/base64"
56
"encoding/json"
67
"fmt"
78
"net/http"
9+
"net/url"
810
"time"
911
)
1012

@@ -23,11 +25,20 @@ func addProxyURL(url string) {
2325
CreateProxyURL(url)
2426
}
2527

26-
func RunCrawler(fofaApiKey, fofaEmail, rule string, pageNum int) (err error) {
28+
func RunCrawler(fofaApiKey, fofaEmail, rule string, pageNum int, proxy string) (err error) {
2729
req, err := http.NewRequest("GET", "https://fofa.info/api/v1/search/all", nil)
2830
if err != nil {
2931
return err
3032
}
33+
tr := &http.Transport{TLSClientConfig: &tls.Config{
34+
InsecureSkipVerify: true,
35+
}}
36+
if proxy != "" {
37+
proxyUrl, err := url.Parse(proxy)
38+
if err == nil { // 使用传入代理
39+
tr.Proxy = http.ProxyURL(proxyUrl)
40+
}
41+
}
3142
rule = base64.StdEncoding.EncodeToString([]byte(rule))
3243
q := req.URL.Query()
3344
q.Add("email", fofaEmail)
@@ -37,7 +48,8 @@ func RunCrawler(fofaApiKey, fofaEmail, rule string, pageNum int) (err error) {
3748
q.Add("page", fmt.Sprintf("%d", pageNum))
3849
q.Add("fields", "host,title,ip,domain,port,country,city,server,protocol")
3950
req.URL.RawQuery = q.Encode()
40-
resp, err := http.DefaultClient.Do(req)
51+
// resp, err := http.DefaultClient.Do(req)
52+
resp, err := (&http.Client{Transport: tr}).Do(req)
4153
if err != nil {
4254
return err
4355
}
@@ -57,10 +69,10 @@ func RunCrawler(fofaApiKey, fofaEmail, rule string, pageNum int) (err error) {
5769
return
5870
}
5971

60-
func StartRunCrawler(fofaApiKey, fofaEmail, rule string, pageCount int) {
72+
func StartRunCrawler(fofaApiKey, fofaEmail, rule string, pageCount int, proxy string) {
6173
runCrawlerFunc := func() {
6274
for i := 1; i <= 3; i++ {
63-
err := RunCrawler(fofaApiKey, fofaEmail, rule, i)
75+
err := RunCrawler(fofaApiKey, fofaEmail, rule, i, proxy)
6476
if err != nil {
6577
fmt.Printf("[!] error: %v\n", err)
6678
}

0 commit comments

Comments (0)