forked from python-streamz/streamz
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathscrape.py
More file actions
63 lines (51 loc) · 1.52 KB
/
scrape.py
File metadata and controls
63 lines (51 loc) · 1.52 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
import heapq
import sys
from urllib.parse import urlparse

import requests
import toolz
from bs4 import BeautifulSoup
from streamz import Stream
def links_of_page(content_page):
    """Return the same-domain absolute links found on one fetched page.

    Parameters
    ----------
    content_page : tuple
        ``(content, page)`` where ``content`` is the raw HTML body and
        ``page`` is the URL it was downloaded from.

    Returns
    -------
    list of str
        Absolute URLs built from root-relative hrefs on the page's own
        host.  Missing hrefs, the bare root ``/``, and links containing
        a query string are skipped.  A page that bs4 cannot parse
        contributes an empty list (best-effort crawling).
    """
    content, page = content_page
    parts = urlparse(page)
    domain = '%s://%s' % (parts.scheme, parts.netloc)
    try:
        soup = BeautifulSoup(content, features="html.parser")
    except Exception:
        # Unparseable page: silently yield no links rather than abort.
        return []
    found = []
    for anchor in soup.find_all('a'):
        href = anchor.get('href')
        if not href or href == '/':
            continue
        if not href.startswith('/') or '?' in href:
            continue
        found.append(domain + href)
    return found
def topk_dict(d, k=10):
    """Return a dict of the ``k`` highest-valued items of ``d``.

    Parameters
    ----------
    d : dict
        Mapping whose values are orderable (e.g. word counts).
    k : int, optional
        Maximum number of items to keep (default 10).

    Returns
    -------
    dict
        The ``k`` items of ``d`` with the largest values; fewer if
        ``d`` has fewer than ``k`` entries.

    Notes
    -----
    Uses :func:`heapq.nlargest` from the standard library instead of
    the third-party ``toolz.topk`` — identical result, one fewer
    dependency for this helper.
    """
    return dict(heapq.nlargest(k, d.items(), key=lambda item: item[1]))
# --- Streaming crawl graph -------------------------------------------------
# URLs emitted into `source` are fetched, their same-domain links are
# extracted, and those links are fed back into `source`, so the crawl
# recurses until no unseen pages remain.
source = Stream()

# unique() drops URLs already seen — this is what lets the feedback
# loop below terminate.
pages = source.unique()
pages.sink(print)  # log each newly discovered page URL

# Fetch every unique page (blocking HTTP GET) and keep only the body.
content = (pages.map(requests.get)
           .map(lambda x: x.content))

# Pair each body with the URL it came from, extract same-domain links,
# and emit them one link at a time.
links = (content.zip(pages)
         .map(links_of_page)
         .flatten())

# Close the loop: discovered links re-enter `source` as new crawl input.
links.connect(source)

# Disabled branch (kept verbatim for reference): would tokenize page
# bodies, drop NLTK stopwords, and print each new top-10 word set.
# Requires the third-party `nltk` package.
"""
from nltk.corpus import stopwords
stopwords = set(stopwords.words('english'))
word_counts = (content.map(str.split)
.concat()
.filter(str.isalpha)
.remove(stopwords.__contains__)
.frequencies())
top_words = (word_counts.map(topk_dict, k=10)
.map(frozenset)
.unique(history=10))
top_words.sink(print)
"""
# Seed the crawl with the URL given on the command line, if any.
# The single emit() drives the whole recursive crawl synchronously,
# so Ctrl-C here simply stops crawling without a traceback.
seed_urls = sys.argv[1:]
if seed_urls:
    try:
        source.emit(seed_urls[0])
    except KeyboardInterrupt:
        pass