|
|
@@ -1,4 +1,4 @@
-__author__ = 'Helium'
+__author__ = 'cern'
 
 # Here, we are importing the auxiliary functions to clean or convert data
 from MarketPlaces.Utilities.utilities import *
|
|
@@ -11,111 +11,107 @@ from bs4 import BeautifulSoup
 
 # stores info it needs in different lists; these lists are returned after being organized
 # @param: soup object looking at html page of description page
 # return: 'row' that contains a variety of lists that each hold info on the description page
-def darkfox_description_parser(soup):
+def blackpyramid_description_parser(soup):
 
     # Fields to be parsed
-    name = "-1"  # 0 Product_Name
-    describe = "-1"  # 1 Product_Description
-    lastSeen = "-1"  # 2 Product_LastViewDate
-    rules = "-1"  # 3 NOT USED ...
-    CVE = "-1"  # 4 Product_CVE_Classification (Common Vulnerabilities and Exposures)
-    MS = "-1"  # 5 Product_MS_Classification (Microsoft Security)
-    review = "-1"  # 6 Product_Number_Of_Reviews
+    vendor = "-1"  # 0 *Vendor_Name
+    success = "-1"  # 1 Vendor_Successful_Transactions
+    rating_vendor = "-1"  # 2 Vendor_Rating
+    name = "-1"  # 3 *Product_Name
+    describe = "-1"  # 4 Product_Description
+    CVE = "-1"  # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures)
+    MS = "-1"  # 6 Product_MS_Classification (Microsoft Security)
     category = "-1"  # 7 Product_Category
-    shipFrom = "-1"  # 8 Product_ShippedFrom
-    shipTo = "-1"  # 9 Product_ShippedTo
-    left = "-1"  # 10 Product_QuantityLeft
-    escrow = "-1"  # 11 Vendor_Warranty
-    terms = "-1"  # 12 Vendor_TermsAndConditions
-    vendor = "-1"  # 13 Vendor_Name
-    sold = "-1"  # 14 Product_QuantitySold
-    addDate = "-1"  # 15 Product_AddedDate
-    available = "-1"  # 16 NOT USED ...
-    endDate = "-1"  # 17 NOT USED ...
-    BTC = "-1"  # 18 Product_BTC_SellingPrice
-    USD = "-1"  # 19 Product_USD_SellingPrice
-    rating = "-1"  # 20 Vendor_Rating
-    success = "-1"  # 21 Vendor_Successful_Transactions
-    EURO = "-1"  # 22 Product_EURO_SellingPrice
+    views = "-1"  # 8 Product_Number_Of_Views
+    reviews = "-1"  # 9 Product_Number_Of_Reviews
+    rating_item = "-1"  # 10 Product_Rating
+    addDate = "-1"  # 11 Product_AddedDate
+    BTC = "-1"  # 12 Product_BTC_SellingPrice
+    USD = "-1"  # 13 Product_USD_SellingPrice
+    EURO = "-1"  # 14 Product_EURO_SellingPrice
+    sold = "-1"  # 15 Product_QuantitySold
+    left = "-1"  # 16 Product_QuantityLeft
+    shipFrom = "-1"  # 17 Product_ShippedFrom
+    shipTo = "-1"  # 18 Product_ShippedTo
+    image = "-1"  # 19 Product_Image
+    vendor_image = "-1"  # 20 Vendor_Image
 
     # Finding Product Name
-    name = soup.find('h1').text
+    name = soup.find('div', {'class': 'panel39002'}).find('span').next_sibling
     name = name.replace('\n', ' ')
     name = name.replace(",", "")
     name = name.strip()
 
+    # Finding Product Description
+    describe = soup.findAll('div', {'class': 'fer048953'})[1].text
+    describe = describe.replace('\n', ' ')
+    describe = describe.replace(",", "")
+    describe = describe.strip()
+
     # Finding Vendor
-    vendor = soup.find('h3').find('a').text.strip()
+    vendor = soup.find('div', {'class': 'bold03905 vstat364'}).text
+    vendor = vendor.split(" ")
+    vendor = vendor[2][:-1]
+    vendor = vendor.replace('\n', ' ')
+    vendor = vendor.replace(",", "")
+    vendor = vendor.strip()
 
     # Finding Vendor Rating
-    rating = soup.find('span', {'class': "tag is-dark"}).text.strip()
+    rating_span = soup.find('span', {'class': 'to3098503t'}).find_next_sibling('span')
+    rating_num = rating_span.find('b').text
+    if rating_num != 'N/A':
+        rating_vendor = rating_num[0:3]
 
     # Finding Successful Transactions
-    success = soup.find('h3').text
-    success = success.replace("Vendor: ", "")
-    success = success.replace(vendor, "")
-    success = success.replace("(", "")
-    success = success.replace(")", "")
+    success_container = soup.find('ul', {'class': 'ul3o00953'}).findAll('li')[1]
+    success = success_container.find('div').text
+    success = success.replace('"', '')
+    success = success.replace("\n", " ")
     success = success.replace(",", "")
     success = success.strip()
 
-    bae = soup.find('div', {'class': "box"}).find_all('ul')
-
     # Finding Prices
-    USD = bae[1].find('strong').text.strip()
-
-    li = bae[2].find_all('li')
+    USD_text = soup.find('li', {'class': 'vul2994 vghul995'}).find('div').text
+    USD = USD_text.split(',')[1]
+    USD = USD.replace('\n', ' ')
+    USD = USD.replace(",", "")
+    USD = USD.strip()
 
-    # Finding Escrow
-    escrow = li[0].find('span', {'class': "tag is-dark"}).text.strip()
-
-    # Finding the Product Category
-    category = li[1].find('span', {'class': "tag is-dark"}).text.strip()
-
     # Finding the Product Quantity Available
-    left = li[3].find('span', {'class': "tag is-dark"}).text.strip()
+    container = soup.find('ul', {'class': 'bic03095'})
 
     # Finding Number Sold
-    sold = li[4].find('span', {'class': "tag is-dark"}).text.strip()
-
-    li = bae[3].find_all('li')
-
-    # Finding Shipment Information (Origin)
-    if "Ships from:" in li[-2].text:
-        shipFrom = li[-2].text
-        shipFrom = shipFrom.replace("Ships from: ", "")
-        # shipFrom = shipFrom.replace(",", "")
-        shipFrom = shipFrom.strip()
-
-    # Finding Shipment Information (Destination)
-    shipTo = li[-1].find('div', {'title': "List of countries is scrollable"}).text
-    shipTo = shipTo.replace("Ships to: ", "")
-    shipTo = shipTo.strip()
-    if "certain countries" in shipTo:
-        countries = ""
-        tags = li[-1].find_all('span', {'class': "tag"})
-        for tag in tags:
-            country = tag.text.strip()
-            countries += country + ", "
-        shipTo = countries.strip(", ")
-
-    # Finding the Product description
-    describe = soup.find('div', {'class': "pre-line"}).text
-    describe = describe.replace("\n", " ")
-    describe = describe.strip()
+    sold_container = container.find('li')
+    sold_div = sold_container.findAll('div')[2]
+    sold = sold_div.find('b').next_sibling
+    sold = sold.replace('"', '')
+    sold = sold.replace("\n", " ")
+    sold = sold.replace(",", "")
+    sold = sold.strip()
 
-    '''# Finding the Number of Product Reviews
-    tag = soup.findAll(text=re.compile('Reviews'))
-    for index in tag:
-        reviews = index
-        par = reviews.find('(')
-        if par >=0:
-            reviews = reviews.replace("Reviews (","")
-            reviews = reviews.replace(")","")
-            reviews = reviews.split(",")
-            review = str(abs(int(reviews[0])) + abs(int(reviews[1])))
-        else :
-            review = "-1"'''
+    # Finding the Product Quantity Available
+    left_container = container.find('li')
+    left_div = left_container.findAll('div')[3]
+    left = left_div.find('b').next_sibling
+    left = left.replace('"', '')
+    left = left.replace("\n", " ")
+    left = left.replace(",", "")
+    left = left.strip()
 
+    # Finding number of reviews
+    positive = soup.find('span', {'class': 'ar04999324'}).text
+    neutral = soup.find('span', {'class': 'ti9400005 can39953'}).text
+    negative = soup.find('span', {'class': 'ti9400005 ti90088 can39953'}).text
+    reviews = int(positive) + int(neutral) + int(negative)
 
+    # Finding product image
+    image = soup.find('img', {'class': 'img0390503'})
+    image = image.get('src')
+    image = image.split('base64,')[-1]
+
+    vendor_image = soup.find('img', {'class': 'img0390503'})
+    vendor_image = vendor_image.get('src')
+    vendor_image = vendor_image.split('base64,')[-1]
 
     # Searching for CVE and MS categories
     cve = soup.findAll(text=re.compile('CVE-\d{4}-\d{4}'))
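
A note on the regex kept by both sides of this hunk: `re.compile('CVE-\d{4}-\d{4}')` only matches four-digit sequence numbers, but CVE IDs have allowed four to seven digits since 2014, so IDs like CVE-2021-34527 slip through; a raw string also avoids Python's invalid-escape warning for `\d`. A minimal sketch of a widened pattern (a suggestion, not code from this change):

```python
import re

# Hypothetical widening of the CVE pattern used by the parsers.
# Raw string avoids the invalid-escape warning; {4,7} covers modern IDs.
CVE_PATTERN = re.compile(r'CVE-\d{4}-\d{4,7}')

assert CVE_PATTERN.search('CVE-2014-0160')   # four-digit IDs still match
assert CVE_PATTERN.search('CVE-2021-34527')  # five-digit IDs now match too
```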
|
|
@@ -136,8 +132,8 @@ def darkfox_description_parser(soup):
     MS = MS.replace('\n', '')
 
     # Populating the final variable (this should be a list with all fields scraped)
-    row = (name, describe, lastSeen, rules, CVE, MS, review, category, shipFrom, shipTo, left, escrow, terms, vendor,
-           sold, addDate, available, endDate, BTC, USD, rating, success, EURO)
+    row = (vendor, rating_vendor, success, name, describe, CVE, MS, category, views, reviews, rating_item, addDate,
+           BTC, USD, EURO, sold, left, shipFrom, shipTo, image, vendor_image)
 
     # Sending the results
     return row
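
Every lookup in the rewritten description parser hard-codes an obfuscated class name (`panel39002`, `fer048953`, `ul3o00953`, `bic03095`, ...), so a single renamed element raises `AttributeError` or `IndexError` and aborts the whole parse. A minimal smoke-test harness against a saved product page makes that failure mode visible early; the file name and import path below are assumptions for illustration, not part of this change:

```python
from bs4 import BeautifulSoup

# Hypothetical import path; point it at wherever this module lives.
from MarketPlaces.BlackPyramid.parser import blackpyramid_description_parser

# 'product_page.html' stands in for a locally saved BlackPyramid description page.
with open('product_page.html', encoding='utf-8') as f:
    soup = BeautifulSoup(f.read(), 'html.parser')

try:
    print(blackpyramid_description_parser(soup))
except (AttributeError, IndexError) as e:
    # Any hard-coded class name missing from the page lands here.
    print(f"Page layout changed; parser selectors need updating: {e}")
```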
|
|
@@ -147,102 +143,109 @@ def darkfox_description_parser(soup):
 
 # stores info it needs in different lists; these lists are returned after being organized
 # @param: soup object looking at html page of listing page
 # return: 'row' that contains a variety of lists that each hold info on the listing page
-def darkfox_listing_parser(soup):
+def blackpyramid_listing_parser(soup):
 
     # Fields to be parsed
-    nm = 0  # Total_Products (Should be Integer)
-    mktName = "DarkFox"  # 0 Marketplace_Name
-    name = []  # 1 Product_Name
-    CVE = []  # 2 Product_CVE_Classification (Common Vulnerabilities and Exposures)
-    MS = []  # 3 Product_MS_Classification (Microsoft Security)
-    category = []  # 4 Product_Category
-    describe = []  # 5 Product_Description
-    escrow = []  # 6 Vendor_Warranty
-    views = []  # 7 Product_Number_Of_Views
-    reviews = []  # 8 Product_Number_Of_Reviews
-    addDate = []  # 9 Product_AddDate
-    lastSeen = []  # 10 Product_LastViewDate
-    BTC = []  # 11 Product_BTC_SellingPrice
-    USD = []  # 12 Product_USD_SellingPrice
-    EURO = []  # 13 Product_EURO_SellingPrice
-    sold = []  # 14 Product_QuantitySold
-    qLeft = []  # 15 Product_QuantityLeft
-    shipFrom = []  # 16 Product_ShippedFrom
-    shipTo = []  # 17 Product_ShippedTo
-    vendor = []  # 18 Vendor
-    rating = []  # 19 Vendor_Rating
-    success = []  # 20 Vendor_Successful_Transactions
-    href = []  # 23 Product_Links (Urls)
-
-    listing = soup.findAll('div', {"class": "card"})
+    nm = 0  # *Total_Products (Should be Integer)
+    mktName = "Black Pyramid"  # 0 *Marketplace_Name
+    vendor = []  # 1 *Vendor
+    rating_vendor = []  # 2 Vendor_Rating
+    success = []  # 3 Vendor_Successful_Transactions
+    name = []  # 4 *Product_Name
+    CVE = []  # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures), don't worry about this
+    MS = []  # 6 Product_MS_Classification (Microsoft Security), don't worry about this
+    category = []  # 7 Product_Category
+    describe = []  # 8 Product_Description
+    views = []  # 9 Product_Number_Of_Views
+    reviews = []  # 10 Product_Number_Of_Reviews
+    rating_item = []  # 11 Product_Rating
+    addDate = []  # 12 Product_AddDate
+    BTC = []  # 13 Product_BTC_SellingPrice
+    USD = []  # 14 Product_USD_SellingPrice
+    EURO = []  # 15 Product_EURO_SellingPrice
+    sold = []  # 16 Product_QuantitySold
+    qLeft = []  # 17 Product_QuantityLeft
+    shipFrom = []  # 18 Product_ShippedFrom
+    shipTo = []  # 19 Product_ShippedTo
+    image = []  # 20 Product_Image
+    image_vendor = []  # 21 Vendor_Image
+    href = []  # 22 Product_Links
+
+    listing = soup.findAll('article', {"class": "product"})
+
+    # Some listing pages have an additional article section which is blank
+    if not listing[-1].findAll('a', href=True):
+        listing = listing[:-1]
 
     # Populating the Number of Products
     nm = len(listing)
 
-    for a in listing:
-        bae = a.findAll('a', href=True)
+    for card in listing:
+        bae = card.findAll('a', href=True)
 
         # Adding the url to the list of urls
-        link = bae[0].get('href')
-        link = cleanLink(link)
+        link = bae[2].get('href')
         href.append(link)
 
         # Finding the Product
-        product = bae[1].find('p').text
+        product = bae[3].text
         product = product.replace('\n', ' ')
        product = product.replace(",", "")
         product = product.replace("...", "")
         product = product.strip()
         name.append(product)
 
-        bae = a.find('div', {'class': "media-content"}).find('div').find_all('div')
-
-        if len(bae) >= 5:
-            # Finding Prices
-            price = bae[0].text
-            ud = price.replace(" USD", " ")
-            # u = ud.replace("$","")
-            u = ud.replace(",", "")
-            u = u.strip()
-            USD.append(u)
-            # bc = (prc[1]).strip(' BTC')
-            # BTC.append(bc)
-
-            # Finding the Vendor
-            vendor_name = bae[1].find('a').text
-            vendor_name = vendor_name.replace(",", "")
-            vendor_name = vendor_name.strip()
-            vendor.append(vendor_name)
-
-            # Finding the Category
-            cat = bae[2].find('small').text
-            cat = cat.replace("Category: ", "")
-            cat = cat.replace(",", "")
-            cat = cat.strip()
-            category.append(cat)
-
-            # Finding Number Sold and Quantity Left
-            num = bae[3].text
-            num = num.replace("Sold: ", "")
-            num = num.strip()
-            sold.append(num)
-
-            quant = bae[4].find('small').text
-            quant = quant.replace("In stock: ", "")
-            quant = quant.strip()
-            qLeft.append(quant)
-
-            # Finding Successful Transactions
-            freq = bae[1].text
-            freq = freq.replace(vendor_name, "")
-            freq = re.sub(r'Vendor Level \d+', "", freq)
-            freq = freq.replace("(", "")
-            freq = freq.replace(")", "")
-            freq = freq.strip()
-            success.append(freq)
+        # Finding description
+        # 'recursive=False' only searches direct children
+        desc = card.findChildren('div', recursive=False)[0]
+        desc = desc.findAll('div', recursive=False)[3].text
+        desc = desc.replace('\n', ' ')
+        desc = desc.replace(",", "")
+        desc = desc.strip()
+        describe.append(desc)
+
+        # Finding Vendor Name
+        vendor_name = bae[4].find('span').text
+        vendor_name = vendor_name.split(' ')[1]
+        vendor_name = vendor_name.replace('\n', ' ')
+        vendor_name = vendor_name.replace(",", "")
+        vendor_name = vendor_name.strip()
+        vendor.append(vendor_name)
+
+        # Finding the Category
+        cat = card.findAll('div', recursive=False)[0].findAll('div', recursive=False)[1].find('span').text
+        cat = cat.replace("\n", "")
+        cat = cat.replace(",", "")
+        cat = cat.strip()
+        category.append(cat)
+
+        bae = card.findAll('div', recursive=False)[1].findAll('div', recursive=False)[1]
+
+        # Finding amount left
+        left = bae.findAll('div', recursive=False)[1].text
+        left = left.replace("x", "")
+        left = left.replace('\n', ' ')
+        left = left.replace(",", "")
+        left = left.strip()
+        qLeft.append(left)
+
+        # Finding amount sold
+        qsold = bae.findAll('div', recursive=False)[2].text
+        qsold = qsold.replace('\n', ' ')
+        qsold = qsold.replace("x", "")
+        qsold = qsold.replace(",", "")
+        qsold = qsold.strip()
+        sold.append(qsold)
+
+        # Finding product image
+        product_image = card.find('img')
+        product_image = product_image.get('src')
+        product_image = product_image.split('base64,')[-1]
+        image.append(product_image)
 
         # Searching for CVE and MS categories
-        cve = a.findAll(text=re.compile('CVE-\d{4}-\d{4}'))
+        cve = card.findAll(text=re.compile('CVE-\d{4}-\d{4}'))
         if not cve:
             cveValue="-1"
         else:
|
|
@@ -255,7 +258,7 @@ def darkfox_listing_parser(soup):
             cveValue=cee
         CVE.append(cveValue)
 
-        ms = a.findAll(text=re.compile('MS\d{2}-\d{3}'))
+        ms = card.findAll(text=re.compile('MS\d{2}-\d{3}'))
         if not ms:
             MSValue="-1"
         else:
|
|
@@ -269,23 +272,24 @@ def darkfox_listing_parser(soup):
         MS.append(MSValue)
 
     # Populate the final variable (this should be a list with all fields scraped)
-    return organizeProducts(mktName, nm, name, CVE, MS, category, describe, escrow, views, reviews, addDate, lastSeen,
-                            BTC, USD, EURO, qLeft, shipFrom, shipTo, vendor, rating, success, sold, href)
+    return organizeProducts(mktName, nm, vendor, rating_vendor, success, name, CVE, MS, category, describe, views,
+                            reviews, rating_item, addDate, BTC, USD, EURO, sold, qLeft, shipFrom, shipTo, href, image,
+                            image_vendor)
 
 
 # called by the crawler to get description links on a listing page
 # @param: beautifulsoup object that is using the correct html page (listing page)
 # return: list of description links from a listing page
-def blackpyramid_links_parser(soup):
+def BlackPyramid_links_parser(soup):
 
     # Returning all links that should be visited by the Crawler
     href = []
 
     listing = soup.findAll('article', {"class": "product"})
 
-    for div in listing:
-        link = div.find('a', {"class": "ah39063"})['href']
+    for item in listing:
+        link = item.find('a', {"class": "ah39063"})['href']
         href.append(link)
 
     return href
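
Taken together, the three functions split a crawl into listing traversal and per-product scraping: the listing parser flattens every product card into organized rows, the links parser yields the per-product URLs, and each fetched product page goes through the description parser. A hedged sketch of how a crawler loop might wire them up; `fetch` and the base URL are assumptions supplied by the crawler, not code from this module:

```python
from urllib.parse import urljoin

from bs4 import BeautifulSoup

BASE_URL = "http://blackpyramid.example/"  # placeholder; the crawler owns the real URL


def scrape_listing(listing_html, fetch):
    """Hypothetical driver; fetch(url) -> html is provided by the crawler.

    Assumes the three parsers above are in scope (same module).
    """
    soup = BeautifulSoup(listing_html, 'html.parser')

    # One organized row per product card on the listing page
    listing_rows = blackpyramid_listing_parser(soup)

    # Visit each description link and parse the full product page
    description_rows = []
    for link in BlackPyramid_links_parser(soup):
        page_html = fetch(urljoin(BASE_URL, link))
        page_soup = BeautifulSoup(page_html, 'html.parser')
        description_rows.append(blackpyramid_description_parser(page_soup))

    return listing_rows, description_rows
```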