# This parser is based on the calsyslab project.
__author__ = 'DarkWeb'

# re is used below for the CVE / MS regular expression searches
import re

# Here, we are importing the auxiliary functions to clean or convert data
from MarketPlaces.Utilities.utilities import *

# Here, we are importing BeautifulSoup to search through the HTML tree
from bs4 import BeautifulSoup


# Parses description pages: takes the HTML of a description page as a soup object and
# extracts the information it needs into individual fields, which are returned together.
# @param: soup object looking at the html page of a description page
# return: 'row', a tuple of fields holding the info scraped from the description page
def metaversemarket_description_parser(soup):

    # Fields to be parsed
    vendor = "-1"            # 0 *Vendor_Name
    success = "-1"           # 1 Vendor_Successful_Transactions
    rating_vendor = "-1"     # 2 Vendor_Rating
    name = "-1"              # 3 *Product_Name
    describe = "-1"          # 4 Product_Description
    CVE = "-1"               # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures)
    MS = "-1"                # 6 Product_MS_Classification (Microsoft Security)
    category = "-1"          # 7 Product_Category
    views = "-1"             # 8 Product_Number_Of_Views
    reviews = "-1"           # 9 Product_Number_Of_Reviews
    rating_item = "-1"       # 10 Product_Rating
    addDate = "-1"           # 11 Product_AddedDate
    BTC = "-1"               # 12 Product_BTC_SellingPrice
    USD = "-1"               # 13 Product_USD_SellingPrice
    EURO = "-1"              # 14 Product_EURO_SellingPrice
    sold = "-1"              # 15 Product_QuantitySold
    left = "-1"              # 16 Product_QuantityLeft
    shipFrom = "-1"          # 17 Product_ShippedFrom
    shipTo = "-1"            # 18 Product_ShippedTo
    image = "-1"             # 19 Product_Image
    vendor_image = "-1"      # 20 Vendor_Image

    # Finding Product Name
    name = soup.find('div', {'class': "panel-heading"}).text
    name = cleanString(name.strip())

    temp = soup.findAll('div', {'class': "col-xs-12 col-sm-6 mt-5"})

    # Finding Product Image
    image = temp[0].find('img')
    image = image.get('src')
    image = image.split('base64,')[-1]

    # Finding Vendor
    temp = temp[1].findAll('span')
    vendor = temp[1].find('b').text
    vendor = cleanString(vendor.strip())

    # Finding Vendor Rating
    pos = soup.find('span', {'class': "badge bg-success fs-12px"}).text
    pos = int(cleanNumbers(pos).strip())
    neg = soup.find('span', {'class': "badge bg-danger fs-12px"}).text
    neg = int(cleanNumbers(neg).strip())
    total = pos + neg
    if total > 0:
        rating_vendor = str(pos / total)

    # Finding Prices
    USD = soup.find('h3', {'class': "mb-2"}).text
    USD = cleanNumbers(USD).strip()

    # Finding the Product Category
    temp = soup.select('div[class="mt-2"]')[1].text
    temp = temp.replace("Category:", "")
    category = temp.strip()

    # Finding Number of Views
    views = soup.find('button', {"class": "btn btn-secondary text-center w-33 fw-bold"}).text
    views = views.strip()

    # Finding the Product Quantity Available
    temp = soup.find('button', {"class": "btn btn-success text-center w-33 fw-bold"}).text
    temp = temp.split("/")
    left = temp[1].strip()

    # Finding Number Sold
    sold = temp[0].strip()

    # Finding Shipment Information (Origin)
    temp = soup.find('div', {'class': "alert alert-info"}).text
    temp = temp.split("to")
    shipFrom = temp[0].replace("Shipping from ", "").strip()

    # Finding Shipment Information (Destination)
    shipTo = temp[1].split("for")
    shipTo = shipTo[0].strip()

    # Finding the Product Description
    describe = soup.find('p', {'class': "card-text"}).text
    describe = cleanString(describe.strip())

    # Searching for CVE and MS categories
    cve = soup.findAll(text=re.compile(r'CVE-\d{4}-\d{4}'))
    if cve:
        CVE = " "
        for idx in cve:
            CVE += (idx)
            CVE += " "
            CVE = CVE.replace(',', ' ')
            CVE = CVE.replace('\n', '')
    ms = soup.findAll(text=re.compile(r'MS\d{2}-\d{3}'))
    if ms:
        MS = " "
        for im in ms:
            MS += (im)
            MS += " "
            MS = MS.replace(',', ' ')
            MS = MS.replace('\n', '')

    # Populating the final variable (this should be a list with all fields scraped)
    row = (vendor, rating_vendor, success, name, describe, CVE, MS, category, views, reviews, rating_item, addDate,
           BTC, USD, EURO, sold, left, shipFrom, shipTo, image, vendor_image)

    # Sending the results
    return row
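

# The helper below is an illustrative usage sketch, NOT part of the original module:
# it assumes a description page was already saved to disk by the crawler (the file
# path passed in is purely hypothetical) and simply feeds the parsed HTML to
# metaversemarket_description_parser defined above.
def _example_parse_description_file(file_path):
    with open(file_path, 'r', encoding='utf-8') as f:
        soup = BeautifulSoup(f.read(), 'html.parser')
    # Returns the tuple of scraped fields (vendor, rating_vendor, ..., vendor_image)
    return metaversemarket_description_parser(soup)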


# Parses listing pages: takes the HTML of a listing page as a soup object and extracts
# the information it needs into parallel lists, which are returned after being organized.
# @param: soup object looking at the html page of a listing page
# return: 'row' that contains a variety of lists that each hold info on the listing page
def metaversemarket_listing_parser(soup):

    # Fields to be parsed
    nm = 0                          # *Total_Products (Should be Integer)
    mktName = "MetaVerseMarket"     # 0 *Marketplace_Name
    vendor = []                     # 1 *Vendor y
    rating_vendor = []              # 2 Vendor_Rating
    success = []                    # 3 Vendor_Successful_Transactions
    name = []                       # 4 *Product_Name y
    CVE = []                        # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures) don't worry about this
    MS = []                         # 6 Product_MS_Classification (Microsoft Security) don't worry about this
    category = []                   # 7 Product_Category y
    describe = []                   # 8 Product_Description
    views = []                      # 9 Product_Number_Of_Views
    reviews = []                    # 10 Product_Number_Of_Reviews
    rating_item = []                # 11 Product_Rating
    addDate = []                    # 12 Product_AddDate
    BTC = []                        # 13 Product_BTC_SellingPrice
    USD = []                        # 14 Product_USD_SellingPrice y
    EURO = []                       # 15 Product_EURO_SellingPrice
    sold = []                       # 16 Product_QuantitySold
    qLeft = []                      # 17 Product_QuantityLeft
    shipFrom = []                   # 18 Product_ShippedFrom
    shipTo = []                     # 19 Product_ShippedTo
    image = []                      # 20 Product_Image
    image_vendor = []               # 21 Vendor_Image
    href = []                       # 22 Product_Links

    listing = soup.findAll('div', {"class": "col-12 col-sm-4 col-xl-3 product_item_col p-1"})

    # Populating the Number of Products
    nm = len(listing)

    for a in listing:
        bae = a.findAll('a', href=True)

        # Adding the url to the list of urls
        link = bae[0].get('href')
        link = cleanLink(link)
        href.append(link)

        # Finding the Product
        product = bae[1].find('span', {"class": "text-primary"}).text
        name.append(cleanString(product.strip()))

        # Finding Prices
        price = a.find('strong').text
        USD.append(cleanNumbers(price).strip())

        # Finding the Vendor
        temp = a.find('div', {'class': "mt-1 fs-12px"})
        temp = temp.findAll('span')
        vendor_name = temp[1].find('b').text
        vendor.append(cleanString(vendor_name.strip()))

        # Finding the Category
        cat = a.select_one('div[class="fs-12px"]')
        cat = cat.findAll('span')[1].text
        cat = cat.strip()
        category.append(cat)

        ul = a.find('ul', {"class": "product-actions"})

        # Finding Number Sold and Quantity Left (shown as "sold/left" in a badge;
        # values like "2k" are expanded to "2000" before cleaning)
        temp = ul.find('span', {'class': "badge bg-success"}).text
        temp = temp.split("/")
        num = temp[0]
        num = num.replace('k', '000')
        sold.append(cleanNumbers(num).strip())

        quant = temp[1]
        quant = quant.replace('k', '000')
        qLeft.append(cleanNumbers(quant).strip())

        # Finding Description
        # description = a.find('p', {'class': "alert alert-light text-ssbold p-1"}).text
        # description = description.replace("\n", " ")
        # description = description.strip()
        # describe.append(cleanString(description))

        # Finding Number of Views
        view = ul.find('span', {'class': "badge bg-primary"}).text
        view = view.replace('.', '')
        view = view.replace('K', '000')
        views.append(view.strip())

        # Find where it ships from
        ships = a.find('div', {'class': "alert alert-info item_alert fs-12px p-1"})
        ships = ships.findAll('b')
        sFrom = ships[0].text.strip()
        shipFrom.append(sFrom)

        # Find where it ships to
        sTo = ships[1].text.strip()
        shipTo.append(sTo)

        # Searching for CVE and MS categories
        cve = a.findAll(text=re.compile(r'CVE-\d{4}-\d{4}'))
        if not cve:
            cveValue = "-1"
        else:
            cee = " "
            for idx in cve:
                cee += (idx)
                cee += " "
                cee = cee.replace(',', ' ')
                cee = cee.replace('\n', '')
            cveValue = cee
        CVE.append(cveValue)

        ms = a.findAll(text=re.compile(r'MS\d{2}-\d{3}'))
        if not ms:
            MSValue = "-1"
        else:
            me = " "
            for im in ms:
                me += (im)
                me += " "
                me = me.replace(',', ' ')
                me = me.replace('\n', '')
            MSValue = me
        MS.append(MSValue)

    # Populate the final variable (this should be a list with all fields scraped)
    return organizeProducts(mktName, nm, vendor, rating_vendor, success, name, CVE, MS, category, describe, views,
                            reviews, rating_item, addDate, BTC, USD, EURO, sold, qLeft, shipFrom, shipTo, href,
                            image, image_vendor)
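

# Another illustrative sketch, not in the original module: run the listing parser on an
# in-memory HTML string. Each per-product list above receives one entry per product on
# the page, and organizeProducts (imported via the utilities wildcard import) merges
# those parallel lists into per-product rows before they are returned.
def _example_parse_listing_html(listing_html):
    soup = BeautifulSoup(listing_html, 'html.parser')
    return metaversemarket_listing_parser(soup)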


# Called by the crawler to get description links on a listing page
# @param: beautifulsoup object that is using the correct html page (listing page)
# return: list of description links from a listing page
def metaversemarket_links_parser(soup):

    # Returning all links that should be visited by the Crawler
    href = []

    listing = soup.findAll('div', {"class": "col-12 col-sm-4 col-xl-3 product_item_col p-1"})

    for a in listing:
        bae = a.find('a', href=True)
        link = bae['href']
        href.append(link)

    return href
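

# Illustrative crawl-flow sketch, not part of the original project: given the HTML of a
# listing page and any callable `fetch_page` that maps a URL to its HTML (in the real
# project that role is played by the crawler, so the parameter here is a stand-in),
# collect the description links and parse every product page behind them.
def _example_crawl_listing(listing_html, fetch_page):
    soup = BeautifulSoup(listing_html, 'html.parser')
    product_rows = []
    for link in metaversemarket_links_parser(soup):
        description_html = fetch_page(link)  # hypothetical fetch step
        desc_soup = BeautifulSoup(description_html, 'html.parser')
        product_rows.append(metaversemarket_description_parser(desc_soup))
    return product_rows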