__author__ = 'DarkWeb'

# Here, we are importing the auxiliary functions to clean or convert data
from MarketPlaces.Utilities.utilities import *
# Regular expressions are used below to match CVE and MS identifiers
import re

# Here, we are importing BeautifulSoup to search through the HTML tree
from bs4 import BeautifulSoup


# This is the method to parse the Description Pages (one page to each Product in the Listing Pages)
def hiddenmarket_description_parser(soup):

    # Fields to be parsed
    vendor = "-1"        # 0 *Vendor_Name
    success = "-1"       # 1 Vendor_Successful_Transactions
    rating_vendor = "-1" # 2 Vendor_Rating
    name = "-1"          # 3 *Product_Name
    describe = "-1"      # 4 Product_Description
    CVE = "-1"           # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures)
    MS = "-1"            # 6 Product_MS_Classification (Microsoft Security)
    category = "-1"      # 7 Product_Category
    views = "-1"         # 8 Product_Number_Of_Views
    reviews = "-1"       # 9 Product_Number_Of_Reviews
    rating_item = "-1"   # 10 Product_Rating
    addDate = "-1"       # 11 Product_AddedDate
    BTC = "-1"           # 12 Product_BTC_SellingPrice
    USD = "-1"           # 13 Product_USD_SellingPrice
    EURO = "-1"          # 14 Product_EURO_SellingPrice
    sold = "-1"          # 15 Product_QuantitySold
    left = "-1"          # 16 Product_QuantityLeft
    shipFrom = "-1"      # 17 Product_ShippedFrom
    shipTo = "-1"        # 18 Product_ShippedTo
    image = "-1"         # 19 Product_Image
    vendor_image = "-1"  # 20 Vendor_Image

    bae = soup.find('div', {'class': "main"})

    # Finding Product Name
    name = bae.find('div', {'class': "heading"}).text
    name = name.replace('\n', ' ')
    name = name.replace(",", "")
    name = name.strip()

    mb = bae.find('div', {'class': "information"}).findAll('tr')

    # Finding Vendor
    vendor = mb[1].find('a').text
    vendor = vendor.replace(",", "")
    vendor = vendor.strip()

    # # Finding Vendor Rating
    # full_stars = bae[2].find_all('i', {'class': "fas fa-star"})
    # half_star = bae[2].find('i', {'class': "fas fa-star-half-alt"})
    # rating = len(full_stars) + (0.5 if half_star is not None else 0)

    # Finding Quantity Left
    temp = mb[-3].text
    left = temp.replace("Quantity in stock:", "")
    left = left.strip()

    # Finding USD
    USD = mb[0].text
    USD = USD.replace("Price:", "")
    USD = USD.replace("USD", "")
    USD = USD.strip()

    # Finding BTC
    # temp = bae.find('div', {"class": "small"}).text.split("BTC")
    # BTC = temp[0].strip()

    # Finding Shipment Information (Origin)
    shipFrom = mb[2].text
    shipFrom = shipFrom.replace("Seller location:", "")
    shipFrom = shipFrom.strip()

    # Finding Shipment Information (Destination)
    shipTo = mb[3].text
    shipTo = shipTo.replace("Ships to (seller):", "")
    shipTo = shipTo.strip()

    # Finding the Product description
    describe = bae.find('div', {"class": "twotabs"}).find('div', {'class': "tab1"}).text
    describe = describe.replace("\n", " ")
    describe = describe.replace("\r", " ")
    describe = describe.replace("-", " ")
    describe = describe.strip()

    # Finding Product Image
    image = soup.find('div', {"class": "thumbnails"}).find('img', {"class": "bigthumbnail"})
    image = image.get('src').split('base64,')[-1]

    # Finding the Product Category
    category = mb[-4].text
    category = category.replace("Category:", "")
    category = category.strip()

    # Finding the number of reviews
    reviews = bae.find_all('div', {'class': "heading"})
    reviews = reviews[-2].text
    reviews = reviews.replace("Comments (", "")
    reviews = reviews.replace(")", "")

    # Searching for CVE and MS categories
    cve = soup.findAll(text=re.compile(r'CVE-\d{4}-\d{4}'))
    if cve:
        CVE = " "
        for idx in cve:
            CVE += (idx)
            CVE += " "
            CVE = CVE.replace(',', ' ')
            CVE = CVE.replace('\n', '')
    ms = soup.findAll(text=re.compile(r'MS\d{2}-\d{3}'))
    if ms:
        MS = " "
        for im in ms:
            MS += (im)
            MS += " "
            MS = MS.replace(',', ' ')
            MS = MS.replace('\n', '')

    # Populating the final variable (this should be a list with all fields scraped)
    row = (vendor, rating_vendor, success, name, describe, CVE, MS, category, views, reviews, rating_item,
           addDate, BTC, USD, EURO, sold, left, shipFrom, shipTo, image, vendor_image)

    # Sending the results
    return row


# This is the method to parse the Listing Pages
def hiddenmarket_listing_parser(soup):

    # Fields to be parsed
    nm = 0                        # *Total_Products (Should be Integer)
    mktName = "HiddenMarket"      # 0 *Marketplace_Name
    vendor = []                   # 1 *Vendor y
    rating_vendor = []            # 2 Vendor_Rating
    success = []                  # 3 Vendor_Successful_Transactions
    name = []                     # 4 *Product_Name y
    CVE = []                      # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures)
    MS = []                       # 6 Product_MS_Classification (Microsoft Security)
    category = []                 # 7 Product_Category y
    describe = []                 # 8 Product_Description
    views = []                    # 9 Product_Number_Of_Views
    reviews = []                  # 10 Product_Number_Of_Reviews
    rating_item = []              # 11 Product_Rating
    addDate = []                  # 12 Product_AddDate
    BTC = []                      # 13 Product_BTC_SellingPrice
    USD = []                      # 14 Product_USD_SellingPrice y
    EURO = []                     # 15 Product_EURO_SellingPrice
    sold = []                     # 16 Product_QuantitySold
    qLeft = []                    # 17 Product_QuantityLeft
    shipFrom = []                 # 18 Product_ShippedFrom
    shipTo = []                   # 19 Product_ShippedTo
    image = []                    # 20 Product_Image
    image_vendor = []             # 21 Vendor_Image
    href = []                     # 22 Product_Links

    listing = soup.findAll('div', {"class": "item"})

    # Populating the Number of Products
    nm = len(listing)

    # Finding Category
    cat = soup.find("div", {'class': "heading"}).text
    cat = cat.replace(",", "")
    cat = cat.strip()

    for card in listing:
        category.append(cat)

        # Adding the url to the list of urls
        link = card.find_all('a')
        link = link[1].get('href')
        href.append(link)

        # Finding Product Name
        product = card.next_sibling.find('div', {'class': "title"})
        product = product.text
        product = product.replace('\n', ' ')
        product = product.replace(",", "")
        product = product.strip()
        name.append(product)

        # Finding Product Image
        image.append("-1")

        # Finding Vendor
        vendor_name = card.text
        vendor_name = vendor_name.replace(",", "")
        vendor_name = vendor_name.strip()
        vendor.append(vendor_name)

        image_vendor.append("-1")

        # Finding USD
        usd = card.next_sibling.find('div', {"class": "buttons"}).find('div', {'class': "price"}).text
        usd = usd.replace("USD", "")
        usd = usd.strip()
        USD.append(usd)

        tb = card.next_sibling.find("div", {"class": "stats"})
        tb = tb.find_all('td')

        # Finding Reviews
        num = tb[-1].text
        num = num.strip()
        reviews.append(num)

        # Finding Views
        view = tb[-3].text.strip()
        views.append(view)

        # Finding Num of Sales
        sale = tb[-2].text.strip()
        sold.append(sale)

        # Finding Item Rating
        if num == '0':
            item_rating = '-1'
        else:
            item_rating = card.next_sibling.find('div', {'class': 'stats'}).find('div', {'class': "stars2"})
            item_rating = item_rating.get('style')
            item_rating = item_rating.replace("width:", "")
            item_rating = item_rating.replace("%", "")
            item_rating = (float(item_rating) * 5.0) / 100.0
            item_rating = "{:.{}f}".format(item_rating, 2)
        rating_item.append(item_rating)

        # Finding shipping info
        shipping = card.next_sibling.find('div', {'class': "shipping"}).text.split('>')
        # Ship from
        origin = shipping[0].strip()
        shipFrom.append(origin)
        # Ship to
        destination = shipping[1].strip()
        shipTo.append(destination)

        # Finding description (site only shows partial description on listing pages)
        # description = card.next_sibling.find('div', {'class': "description"}).text
        # description = description.replace("\n", " ")
        # description = description.replace("\r", " ")
        # description = description.replace("-", " ")
        # description = description.strip()
        # describe.append(description)

        # Searching for CVE and MS categories
        cve = card.findAll(text=re.compile(r'CVE-\d{4}-\d{4}'))
        if not cve:
            cveValue = "-1"
        else:
            cee = " "
            for idx in cve:
                cee += (idx)
                cee += " "
                cee = cee.replace(',', ' ')
                cee = cee.replace('\n', '')
            cveValue = cee
        CVE.append(cveValue)

        ms = card.findAll(text=re.compile(r'MS\d{2}-\d{3}'))
        if not ms:
            MSValue = "-1"
        else:
            me = " "
            for im in ms:
                me += (im)
                me += " "
                me = me.replace(',', ' ')
                me = me.replace('\n', '')
            MSValue = me
        MS.append(MSValue)

    # Populate the final variable (this should be a list with all fields scraped)
    return organizeProducts(mktName, nm, vendor, rating_vendor, success, name, CVE, MS, category, describe, views,
                            reviews, rating_item, addDate, BTC, USD, EURO, sold, qLeft, shipFrom, shipTo, href,
                            image, image_vendor)


def hiddenmarket_links_parser(soup):
    # Returning all links that should be visited by the Crawler
    href = []

    listing = soup.findAll('div', {"class": "item"})

    for div in listing:
        link = div.findAll('a')
        link = link[1]
        link = link['href']
        href.append(link)

    return href
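# Example usage (a minimal sketch, not part of the crawler pipeline): the parsers above expect a
# BeautifulSoup tree built from a saved HiddenMarket page. The file names below are hypothetical
# placeholders; in the real flow the crawler supplies the page source, and organizeProducts
# (imported via MarketPlaces.Utilities.utilities) assembles the listing rows.
if __name__ == '__main__':
    # Parse a saved description page (hypothetical sample file)
    with open('hiddenmarket_description.html', 'r', encoding='utf-8') as f:
        description_soup = BeautifulSoup(f.read(), 'html.parser')
    print(hiddenmarket_description_parser(description_soup))

    # Extract the product links the crawler would visit from a saved listing page (hypothetical sample file)
    with open('hiddenmarket_listing.html', 'r', encoding='utf-8') as f:
        listing_soup = BeautifulSoup(f.read(), 'html.parser')
    print(hiddenmarket_links_parser(listing_soup))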