From ac7cc85d0e01d1e0638cf907367c0c722db4ac18 Mon Sep 17 00:00:00 2001
From: LynnTaka
Date: Wed, 5 Jun 2024 03:10:31 +0000
Subject: [PATCH] Added incomplete parser for abacus manually

---
 MarketPlaces/Abacus/parser.py | 224 ++++++++++++++++++++++++++++++++++
 1 file changed, 224 insertions(+)
 create mode 100644 MarketPlaces/Abacus/parser.py

diff --git a/MarketPlaces/Abacus/parser.py b/MarketPlaces/Abacus/parser.py
new file mode 100644
index 0000000..fe3c561
--- /dev/null
+++ b/MarketPlaces/Abacus/parser.py
@@ -0,0 +1,224 @@
+__author__ = 'DarkWeb'
+
+# Here, we are importing the auxiliary functions to clean or convert data
+from MarketPlaces.Utilities.utilities import *
+
+# Here, we are importing BeautifulSoup to search through the HTML tree
+from bs4 import BeautifulSoup
+
+# Here, we are importing re for the CVE/MS regular expressions used below
+import re
+
+
+# parses description pages, so takes html pages of description pages using soup object, and parses it for info it needs
+# stores info it needs in different lists, these lists are returned after being organized
+# @param: soup object looking at html page of description page
+# return: 'row' that contains a variety of lists that each hold info on the description page
+def abacus_description_parser(soup):
+    # Fields to be parsed
+
+    vendor = "-1"        # 0 *Vendor_Name
+    success = "-1"       # 1 Vendor_Successful_Transactions
+    rating_vendor = "-1" # 2 Vendor_Rating
+    name = "-1"          # 3 *Product_Name
+    describe = "-1"      # 4 Product_Description
+    CVE = "-1"           # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures)
+    MS = "-1"            # 6 Product_MS_Classification (Microsoft Security)
+    category = "-1"      # 7 Product_Category
+    views = "-1"         # 8 Product_Number_Of_Views
+    reviews = "-1"       # 9 Product_Number_Of_Reviews
+    rating_item = "-1"   # 10 Product_Rating
+    addDate = "-1"       # 11 Product_AddedDate
+    BTC = "-1"           # 12 Product_BTC_SellingPrice
+    USD = "-1"           # 13 Product_USD_SellingPrice
+    EURO = "-1"          # 14 Product_EURO_SellingPrice
+    sold = "-1"          # 15 Product_QuantitySold
+    left = "-1"          # 16 Product_QuantityLeft
+    shipFrom = "-1"      # 17 Product_ShippedFrom
+    shipTo = "-1"        # 18 Product_ShippedTo
+    image = "-1"         # 19 Product_Image
+    vendor_image = "-1"  # 20 Vendor_Image
+
+    product_info = soup.find('div', {'class': '2xl:self-start w-full h-full'})
+    about_vendor = soup.find('div', {'class': 'px-0.5 py-1 flex flex-col items-center gap-2 text-[13px]'})
+
+    # Finding the vendor name, dropping the trailing token when present
+    try:
+        ven_temp = about_vendor.find('div', {'class': 'flex items-center gap-0.5'}).find('a').text
+        ven_temp_list = ven_temp.split(' ')
+        if len(ven_temp_list) > 1:
+            ven_temp_list = ven_temp_list[:-1]
+        ven_temp = ' '.join(ven_temp_list)
+        vendor = cleanString(ven_temp.strip())
+    except Exception:
+        vendor = '-1'
+    print(vendor)
+
+    success = "-1"
+
+    # Finding the vendor rating and stripping the percent sign
+    try:
+        rating = about_vendor.find('div', {'class': 'font-bold rounded px-2'}).text
+        rating_list = rating.split(' ')
+        rating = rating_list[0]
+        rating = rating.replace('%', '')
+    except Exception:
+        rating = '-1'
+    rating_vendor = rating
+    print(rating_vendor)
+
+    # Finding the product name (find() returns a Tag, so .text is needed before cleaning)
+    name = product_info.find('div', {'class': 'w-full flex gap-0.5 items-center border-solid border-0 border-border border-b group-hover:border-abacus2 text-sm font-bold justify-center 2xl:justify-start py-1 leading-tight'}).text
+    name = cleanString(name.strip())
+
+    # Finding the product description
+    try:
+        description = soup.find('div', {'class': 'hidden gap-2 flex-col w-0 p-3 anim anim-FadeIn'}).find('div', {'class': 'text-xs w-full text-left text-black'}).text
+        description = cleanString(description.strip())
+    except Exception:
+        description = '-1'
+    describe = description
+
+    # The remaining fields (CVE, MS, category, views, reviews, rating_item, addDate,
+    # prices, quantities, shipping and images) are not parsed yet and keep their "-1" defaults
+
+    # Populating the final variable (this should be a list with all fields scraped)
+    row = (vendor, rating_vendor, success, name, describe, CVE, MS, category, views, reviews, rating_item, addDate,
+           BTC, USD, EURO, sold, left, shipFrom, shipTo, image, vendor_image)
+
+    # Sending the results
+    return row
+
+
+# parses listing pages, so takes html pages of listing pages using soup object, and parses it for info it needs
+# stores info it needs in different lists, these lists are returned after being organized
+# @param: soup object looking at html page of listing page
+# return: 'row' that contains a variety of lists that each hold info on the listing page
+def abacus_listing_parser(soup):
+
+    # Fields to be parsed
+    nm = 0                # *Total_Products (Should be Integer)
+    mktName = "Abacus"    # 0 *Marketplace_Name
+    vendor = []           # 1 *Vendor y
+    rating_vendor = []    # 2 Vendor_Rating
+    success = []          # 3 Vendor_Successful_Transactions
+    name = []             # 4 *Product_Name y
+    CVE = []              # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures) don't worry about this
+    MS = []               # 6 Product_MS_Classification (Microsoft Security) don't worry about this
+    category = []         # 7 Product_Category y
+    describe = []         # 8 Product_Description
+    views = []            # 9 Product_Number_Of_Views
+    reviews = []          # 10 Product_Number_Of_Reviews
+    rating_item = []      # 11 Product_Rating
+    addDate = []          # 12 Product_AddDate
+    BTC = []              # 13 Product_BTC_SellingPrice
+    USD = []              # 14 Product_USD_SellingPrice y
+    EURO = []             # 15 Product_EURO_SellingPrice
+    sold = []             # 16 Product_QuantitySold
+    qLeft = []            # 17 Product_QuantityLeft
+    shipFrom = []         # 18 Product_ShippedFrom
+    shipTo = []           # 19 Product_ShippedTo
+    image = []            # 20 Product_Image
+    image_vendor = []     # 21 Vendor_Image
+    href = []             # 22 Product_Links
+
+    # Finding the category, which is shared by every product on the listing page
+    cat = soup.find('span', {"class": "btn btn-sm btn-outline-dark w-100 active"}).text
+    cat = cleanString(cat).strip()
+
+    listing = soup.find('div', {"class": 'card-body text-black text-left bg-dark'}).findAll('div', {"class": 'card mb-4 border-danger rounded-0'})
+
+    # Populating the Number of Products
+    nm = len(listing)
+
+    for a in listing:
+
+        category.append(cat)
+
+        # Adding the url to the list of urls
+        link = a.find('a', {'class': "badge badge-danger w-100 text-white"}).get('href')
+        link = cleanLink(link)
+        href.append(link)
+
+        # Finding the Product name
+        product = a.find('div', {"class": 'marquee-parent'}).find('div', {"class": "marquee-child"}).text
+        product = product.replace('\n', ' ')
+        product = product.replace(",", "")
+        product = product.replace("...", "")
+        product = product.strip()
+        name.append(product)
+
+        # Finding Product Image
+        product_image = a.find('img')
+        product_image = product_image.get('src')
+        product_image = product_image.split('base64,')[-1]
+        image.append(product_image)
+
+        # Finding Prices
+        price = a.findAll('a', {"class": "text-white"})[-1].text
+        price = price.replace("$", "")
+        price = price.strip()
+        USD.append(price)
+
+        # Finding Item Rating by counting full and half star icons
+        temp = a.find('small', {"class": "text-white"})
+        rating = len(temp.findAll('i', {"class": "fas fa-star"}))
+        half_stars = len(temp.findAll('i', {'class': "fas fa-star-half-alt"}))
+        if half_stars > 0:
+            rating += 0.5
+        rating_item.append(str(rating))
+
+        # Finding the Vendor
+        vendor_name = a.find('a', {"class": 'badge badge-dark w-100 text-white my-1'}).text
+        vendor_name = vendor_name.replace(",", "")
+        vendor_name = vendor_name.strip()
+        vendor.append(vendor_name)
+
+        image_vendor.append("-1")
+
+        # Searching for CVE and MS categories
+        cve = a.findAll(text=re.compile(r'CVE-\d{4}-\d{4}'))
+        if not cve:
+            cveValue = "-1"
+        else:
+            cee = " "
+            for idx in cve:
+                cee += idx
+                cee += " "
+            cee = cee.replace(',', ' ')
+            cee = cee.replace('\n', '')
+            cveValue = cee
+        CVE.append(cveValue)
+
+        ms = a.findAll(text=re.compile(r'MS\d{2}-\d{3}'))
+        if not ms:
+            MSValue = "-1"
+        else:
+            me = " "
+            for im in ms:
+                me += im
+                me += " "
+            me = me.replace(',', ' ')
+            me = me.replace('\n', '')
+            MSValue = me
+        MS.append(MSValue)
+
+    # Populate the final variable (this should be a list with all fields scraped)
+    return organizeProducts(mktName, nm, vendor, rating_vendor, success, name, CVE, MS, category, describe, views,
+                            reviews, rating_item, addDate, BTC, USD, EURO, sold, qLeft, shipFrom, shipTo, href, image, image_vendor)
+
+
+# called by the crawler to get description links on a listing page
+# @param: beautifulsoup object that is using the correct html page (listing page)
+# return: list of description links from a listing page
+def abacus_links_parser(soup):
+    # Returning all links that should be visited by the Crawler
+
+    href = []
+    listing = soup.findAll('div', {"class": "src-listing p-1 pb-0 flex flex-col justify-between w-full rounded-md border-solid border-[1px] !border-border !bg-white hover:!bg-hover hover:!border-abacus2 !mt-0"})
+
+    for a in listing:
+        link = a.find('a', href=True).get('href')
+        href.append(link)
+
+    return href
\ No newline at end of file
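
Usage sketch (illustrative only, not part of the patch): the three parsers each take a BeautifulSoup object built from a saved Abacus page. The file name 'abacus_listing.html' and the 'html.parser' backend below are assumptions for the sake of the example; only the abacus_* functions come from MarketPlaces/Abacus/parser.py.

from bs4 import BeautifulSoup

from MarketPlaces.Abacus.parser import abacus_listing_parser, abacus_links_parser

# 'abacus_listing.html' is a hypothetical saved copy of an Abacus listing page
with open('abacus_listing.html', 'r', encoding='utf-8') as f:
    soup = BeautifulSoup(f.read(), 'html.parser')

# description-page links the crawler would visit next
for link in abacus_links_parser(soup):
    print(link)

# listing data organized into rows by utilities.organizeProducts
rows = abacus_listing_parser(soup)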