-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathcrawlerrequests.py
46 lines (33 loc) · 965 Bytes
/
crawlerrequests.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
"""
This script is not required, RequestsCrawler can be called directly from a project.
- we just show off how it can be done
- it can be used to compare with other crawling scripts
"""
import json
import time
import argparse
import sys
from rsshistory import webtools
def main():
    """Run a crawl with ``RequestsCrawler`` driven by command-line arguments.

    Flow:
      - initialize webtools config and route its logging to stdout
      - parse CLI args via ``ScriptCrawlerParser`` (URL, output file, verbosity)
      - run the crawler and save the response to the requested output file

    Exits with status 1 when the arguments are invalid or when the crawl
    yields no response; returns normally on success.
    """
    webtools.WebConfig.init()
    webtools.WebConfig.use_print_logging()

    parser = webtools.ScriptCrawlerParser()
    parser.parse()
    if not parser.is_valid():
        # Invalid CLI arguments: abort with a non-zero exit status.
        # (Removed the unreachable `return` that followed sys.exit().)
        sys.exit(1)

    request = parser.get_request()
    driver = webtools.RequestsCrawler(request, parser.args.output_file)

    if parser.args.verbose:
        print("Running request:{} with RequestsCrawler".format(request))

    response = driver.run()
    if not response:
        print("No response")
        # Release the driver's resources on the failure path too —
        # previously only the success path called close().
        driver.close()
        sys.exit(1)

    if parser.args.verbose:
        print("Contents")
        print(response.get_text())
        print(response)

    driver.save_response()
    driver.close()
main()