diff --git a/MarketPlaces/MikesGrandStore/parser.py b/MarketPlaces/MikesGrandStore/parser.py index f6ae641..6a24815 100644 --- a/MarketPlaces/MikesGrandStore/parser.py +++ b/MarketPlaces/MikesGrandStore/parser.py @@ -85,144 +85,36 @@ def mikesGrandStore_description_parser(soup: BeautifulSoup) -> Tuple: return row -def mikesGtrandStore_listing_parser(soup: BeautifulSoup): - pass - -#parses description pages, so takes html pages of description pages using soup object, and parses it for info it needs -#stores info it needs in different lists, these lists are returned after being organized -#@param: soup object looking at html page of description page -#return: 'row' that contains a variety of lists that each hold info on the description page -def darkfox_description_parser(soup): - +def mikesGtrandStore_listing_parser(soup: BeautifulSoup) -> List: + # Fields to be parsed - - name = "-1" # 0 Product_Name - describe = "-1" # 1 Product_Description - lastSeen = "-1" # 2 Product_LastViewDate - rules = "-1" # 3 NOT USED ... - CVE = "-1" # 4 Product_CVE_Classification (Common Vulnerabilities and Exposures) - MS = "-1" # 5 Product_MS_Classification (Microsoft Security) - review = "-1" # 6 Product_Number_Of_Reviews - category = "-1" # 7 Product_Category - shipFrom = "-1" # 8 Product_ShippedFrom - shipTo = "-1" # 9 Product_ShippedTo - left = "-1" # 10 Product_QuantityLeft - escrow = "-1" # 11 Vendor_Warranty - terms = "-1" # 12 Vendor_TermsAndConditions - vendor = "-1" # 13 Vendor_Name - sold = "-1" # 14 Product_QuantitySold - addDate = "-1" # 15 Product_AddedDate - available = "-1" # 16 NOT USED ... - endDate = "-1" # 17 NOT USED ... 
- BTC = "-1" # 18 Product_BTC_SellingPrice - USD = "-1" # 19 Product_USD_SellingPrice - rating = "-1" # 20 Vendor_Rating - success = "-1" # 21 Vendor_Successful_Transactions - EURO = "-1" # 22 Product_EURO_SellingPrice - - # Finding Product Name - name = soup.find('h1').text - name = name.replace('\n', ' ') - name = name.replace(",", "") - name = name.strip() - - # Finding Vendor - vendor = soup.find('h3').find('a').text.strip() - - # Finding Vendor Rating - rating = soup.find('span', {'class': "tag is-dark"}).text.strip() - - # Finding Successful Transactions - success = soup.find('h3').text - success = success.replace("Vendor: ", "") - success = success.replace(vendor, "") - success = success.replace("(", "") - success = success.replace(")", "") - success = success.strip() - - bae = soup.find('div', {'class': "box"}).find_all('ul') - - # Finding Prices - USD = bae[1].find('strong').text.strip() - - li = bae[2].find_all('li') - - # Finding Escrow - escrow = li[0].find('span', {'class': "tag is-dark"}).text.strip() - - # Finding the Product Category - category = li[1].find('span', {'class': "tag is-dark"}).text.strip() - - # Finding the Product Quantity Available - left = li[3].find('span', {'class': "tag is-dark"}).text.strip() - - # Finding Number Sold - sold = li[4].find('span', {'class': "tag is-dark"}).text.strip() - - li = bae[3].find_all('li') - - # Finding Shipment Information (Origin) - if "Ships from:" in li[-2].text: - shipFrom = li[-2].text - shipFrom = shipFrom.replace("Ships from: ", "") - # shipFrom = shipFrom.replace(",", "") - shipFrom = shipFrom.strip() - - # Finding Shipment Information (Destination) - shipTo = li[-1].find('div', {'title': "List of countries is scrollable"}).text - shipTo = shipTo.replace("Ships to: ", "") - shipTo = shipTo.strip() - if "certain countries" in shipTo: - countries = "" - tags = li[-1].find_all('span', {'class': "tag"}) - for tag in tags: - country = tag.text.strip() - countries += country + ", " - shipTo = 
countries.strip(", ") - - # Finding the Product description - describe = soup.find('div', {'class': "pre-line"}).text - describe = describe.replace("\n", " ") - describe = describe.strip() - - '''# Finding the Number of Product Reviews - tag = soup.findAll(text=re.compile('Reviews')) - for index in tag: - reviews = index - par = reviews.find('(') - if par >=0: - reviews = reviews.replace("Reviews (","") - reviews = reviews.replace(")","") - reviews = reviews.split(",") - review = str(abs(int(reviews[0])) + abs(int(reviews[1]))) - else : - review = "-1"''' - - # Searching for CVE and MS categories - cve = soup.findAll(text=re.compile('CVE-\d{4}-\d{4}')) - if cve: - CVE = " " - for idx in cve: - CVE += (idx) - CVE += " " - CVE = CVE.replace(',', ' ') - CVE = CVE.replace('\n', '') - ms = soup.findAll(text=re.compile('MS\d{2}-\d{3}')) - if ms: - MS = " " - for im in ms: - MS += (im) - MS += " " - MS = MS.replace(',', ' ') - MS = MS.replace('\n', '') - - # Populating the final variable (this should be a list with all fields scraped) - row = (name, describe, lastSeen, rules, CVE, MS, review, category, shipFrom, shipTo, left, escrow, terms, vendor, - sold, addDate, available, endDate, BTC, USD, rating, success, EURO) - - # Sending the results - return row - + nm = 0 # Total_Products (Should be Integer) + mktName = "MikesGrandStore" # 0 Marketplace_Name + name = [] # 1 Product_Name + CVE = [] # 2 Product_CVE_Classification (Common Vulnerabilities and Exposures) + MS = [] # 3 Product_MS_Classification (Microsoft Security) + category = [] # 4 Product_Category + describe = [] # 5 Product_Description + escrow = [] # 6 Vendor_Warranty + views = [] # 7 Product_Number_Of_Views + reviews = [] # 8 Product_Number_Of_Reviews + addDate = [] # 9 Product_AddDate + lastSeen = [] # 10 Product_LastViewDate + BTC = [] # 11 Product_BTC_SellingPrice + USD = [] # 12 Product_USD_SellingPrice + EURO = [] # 13 Product_EURO_SellingPrice + sold = [] # 14 Product_QuantitySold + qLeft =[] # 15 
Product_QuantityLeft + shipFrom = [] # 16 Product_ShippedFrom + shipTo = [] # 17 Product_ShippedTo + vendor = [] # 18 Vendor + rating = [] # 19 Vendor_Rating + success = [] # 20 Vendor_Successful_Transactions + href = [] # 23 Product_Links (Urls) + + + + pass #parses listing pages, so takes html pages of listing pages using soup object, and parses it for info it needs #stores info it needs in different lists, these lists are returned after being organized diff --git a/MarketPlaces/ThiefWorld/parser.py b/MarketPlaces/ThiefWorld/parser.py index a239075..f091b53 100644 --- a/MarketPlaces/ThiefWorld/parser.py +++ b/MarketPlaces/ThiefWorld/parser.py @@ -1,10 +1,92 @@ __author__ = 'DarkWeb' # Here, we are importing the auxiliary functions to clean or convert data +from typing import List from MarketPlaces.Utilities.utilities import * # Here, we are importing BeautifulSoup to search through the HTML tree -from bs4 import BeautifulSoup +from bs4 import BeautifulSoup, Tag + + +def thiefWorld_description_parser(soup: BeautifulSoup) -> Tuple: + + # Fields to be parsed + + name = "-1" # 0 Product_Name + describe = "-1" # 1 Product_Description + lastSeen = "-1" # 2 Product_LastViewDate + rules = "-1" # 3 NOT USED ... + CVE = "-1" # 4 Product_CVE_Classification (Common Vulnerabilities and Exposures) + MS = "-1" # 5 Product_MS_Classification (Microsoft Security) + review = "-1" # 6 Product_Number_Of_Reviews + category = "-1" # 7 Product_Category + shipFrom = "-1" # 8 Product_ShippedFrom + shipTo = "-1" # 9 Product_ShippedTo + left = "-1" # 10 Product_QuantityLeft + escrow = "-1" # 11 Vendor_Warranty + terms = "-1" # 12 Vendor_TermsAndConditions + vendor = "-1" # 13 Vendor_Name + sold = "-1" # 14 Product_QuantitySold + addDate = "-1" # 15 Product_AddedDate + available = "-1" # 16 NOT USED ... + endDate = "-1" # 17 NOT USED ... 
+ BTC = "-1" # 18 Product_BTC_SellingPrice + USD = "-1" # 19 Product_USD_SellingPrice + rating = "-1" # 20 Vendor_Rating + success = "-1" # 21 Vendor_Successful_Transactions + EURO = "-1" # 22 Product_EURO_SellingPrice + + name = soup.find("h1", {'class': 'title'}).text + name = cleanString(name.strip()) + + describe = soup.find('div', {'id': 'descriptionContent'}).text + describe = cleanString(describe.strip()) + + commentListTag: Tag = soup.find('ul', {'class': 'comment_list scrollbar'}) + commentList = commentListTag.find_all('li') + review = str(len(commentList)) + + citySelection: str = soup.find('ul', {'class': 'meta text-muted i_location'}).text + shipFrom = cleanString(citySelection.strip()) + + vendor = soup.find('h1', {'class': 'title over'}).text + vendor = cleanString(vendor.strip()) + + usdTag: Tag = soup.find('div', {'class': 'product_price__big'}).find('span') + usdText = usdTag.text.split('/')[0] + # usdText format: " USD " (i.e., "70 000 USD ") + USD = cleanString(usdText.replace("USD", "").strip()) + + ratingDiv = soup.find('div', {'class': 'rating_star'}) + rating = ratingDiv.get('title').split(' ')[1] + + row = ( + name, + describe, + lastSeen, + rules, + CVE, + MS, + review, + category, + shipFrom, + shipTo, + left, + escrow, + terms, + vendor, + sold, + addDate, + available, + endDate, + BTC, + USD, + rating, + success, + EURO + ) + + return row #parses description pages, so takes html pages of description pages using soup object, and parses it for info it needs