def run(self):
    while True:
        # Get the next target host from the shared queue
        target = self.host_queue.get()
        try:
            # Build the URL to request
            url = "http://{0}/{1}".format(target, self.page)
            # Configure the mechanize browser for scraping
            br = mechanize.Browser()
            br.set_handle_equiv(False)
            br.set_handle_redirect(True)
            br.set_handle_referer(False)
            br.set_handle_robots(False)
            scraped = br.open(url)
            # Save the raw response to disk as <host>.<page>
            saved_name = "{0}.{1}".format(target, self.page)
            with open(os.path.join(self.save, saved_name), 'wb') as temp_file:
                temp_file.write(scraped.read())
            print "Successfully scraped {}".format(url)
        except Exception:
            print "Error with {}".format(target)
        # Mark the task done so a join() on the queue can complete
        self.host_queue.task_done()
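
This run() method assumes an enclosing threading.Thread subclass that is handed the shared host queue, the page to request, and the output directory. Below is a minimal sketch of one way the surrounding class and driver code could look; the class name ScrapeThread, the worker count, and the sample hosts are illustrative assumptions rather than part of the original listing, and the third-party mechanize module must be installed.

import os
import Queue
import threading

import mechanize

class ScrapeThread(threading.Thread):
    def __init__(self, host_queue, page, save):
        threading.Thread.__init__(self)
        self.host_queue = host_queue   # shared Queue of target hosts
        self.page = page               # path requested on each host
        self.save = save               # directory where results are written
        self.daemon = True             # do not block interpreter exit

    # the run() method shown above belongs here

if __name__ == '__main__':
    host_queue = Queue.Queue()
    for host in ["192.168.1.10", "192.168.1.11"]:   # hypothetical targets
        host_queue.put(host)
    for _ in range(5):                               # arbitrary worker count
        ScrapeThread(host_queue, "index.html", "/tmp/scraped").start()
    host_queue.join()                                # wait until every host is processed

Because the workers loop forever, marking them as daemon threads lets the program exit once host_queue.join() returns.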