# Based on the calsyslab project.
__author__ = 'DarkWeb'

# Here, we are importing the auxiliary functions to clean or convert data
from MarketPlaces.Utilities.utilities import *
# Here, we are importing BeautifulSoup to search through the HTML tree
from bs4 import BeautifulSoup
# 're' is needed for the CVE/MS regular-expression searches below
import re


# Parses description pages: takes the soup object of a description page's HTML and extracts
# the info it needs into individual fields, which are organized and returned together.
# @param: soup object looking at the HTML page of a description page
# return: 'row', a tuple of fields that each hold one piece of info from the description page
def gofish_description_parser(soup):
    # Fields to be parsed
    vendor = "-1"         # 0 *Vendor_Name
    success = "-1"        # 1 Vendor_Successful_Transactions
    rating_vendor = "-1"  # 2 Vendor_Rating
    name = "-1"           # 3 *Product_Name
    describe = "-1"       # 4 Product_Description
    CVE = "-1"            # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures)
    MS = "-1"             # 6 Product_MS_Classification (Microsoft Security)
    category = "-1"       # 7 Product_Category
    views = "-1"          # 8 Product_Number_Of_Views
    reviews = "-1"        # 9 Product_Number_Of_Reviews
    rating_item = "-1"    # 10 Product_Rating
    addDate = "-1"        # 11 Product_AddedDate
    BTC = "-1"            # 12 Product_BTC_SellingPrice
    USD = "-1"            # 13 Product_USD_SellingPrice
    EURO = "-1"           # 14 Product_EURO_SellingPrice
    sold = "-1"           # 15 Product_QuantitySold
    left = "-1"           # 16 Product_QuantityLeft
    shipFrom = "-1"       # 17 Product_ShippedFrom
    shipTo = "-1"         # 18 Product_ShippedTo
    image = "-1"          # 19 Product_Image
    vendor_image = "-1"   # 20 Vendor_Image

    # Finding the Product Name
    divmb = soup.find('div', {'class': "p-3 mb-1 fs-3 fw-bold border border-2 bg-white rounded"})
    if divmb is None:
        # fall back to the smaller heading variant (fs-4) used on some pages
        divmb = soup.find('div', {'class': "p-3 mb-1 fs-4 fw-bold border border-2 bg-white rounded"})

    name = divmb.text
    name = name.replace('\n', ' ')
    name = name.replace('\r', ' ')
    name = name.replace('\t', ' ')
    name = name.replace(",", "")
    name = name.strip()

    # Finding the Vendor
    vendor = soup.find('div', {'class': 'my-1'}).find('a').text.strip()

    # Finding the Vendor Rating
    # temp = soup.find('div', {'class': ""}).text
    # temp = temp.split('(')
    # rating = temp[0].replace("Vendor's Review : ", "")
    # rating = rating.replace("%", "")
    # rating_vendor = rating.strip()

    # Finding the Product Rating and Number of Product Reviews
    # reviews = temp[2].replace(" review)", "")
    # reviews = reviews.strip()
    # temp = temp[1].split(")")
    # rating = temp[1].replace("Product Review : ", "")
    # rating = rating.replace("%", "")
    # rating_item = rating.strip()

    # Finding Prices
    precios = soup.findAll('td', {'class': "text-end text-nowrap"})
    USD = precios[0].text.strip().replace('$', '')

    # Finding the Product Category
    # pmb = soup.findAll('p', {'class': "mb-1"})
    # category = pmb[-1].text
    # category = category.replace("Category: ", "").strip()

    # Finding the Product Quantity Available
    # left = divmb[-1].text
    # left = left.split(",", 1)[1]
    # left = left.replace("in stock", "")
    # left = left.strip()

    # Finding Number Sold
    # sold = divmb[-1].text
    # sold = sold.split(",", 1)[0]
    # sold = sold.replace("sold", "")
    # sold = sold.strip()

    # Finding Shipment Information (Origin)
    origin = soup.findAll('div', {'class': "p-3 mt-2 mb-3 border border-2 bg-white rounded"})
    # the block's heading (a span) is stripped out so only the origin text remains
    remove = origin[0].find('span').text.strip()
    origin = origin[0].text.strip()
    origin = origin.replace(remove, '')
    shipFrom = origin.strip()

    # Finding Shipment Information (Destination)
    dest = soup.findAll('div', {'class': 'p-3 mb-3 overflow-auto border border-2 bg-white rounded'})
    dest = dest[-1].text.strip()
    dest = dest.replace('[', '')
    dest = dest.replace(']', '')
    shipTo = dest[1:].strip()

    # Finding the Product Description
    cardbody = soup.findAll('div', {'class': "p-3 mb-3 overflow-auto border border-2 bg-white rounded"})
    describe = cardbody[0].text
    describe = describe.replace('\n', ' ')
    describe = describe.strip()

    # Finding the Product Image
    img = soup.findAll('figure', {'class': 'image-feature'})[0]
    image = img.find('img', {'class': 'image-block rounded'})
    image = image.get('src')
    # images are embedded as data URIs; keep only the base64 payload after 'base64,'
    image = image.split('base64,')[-1]

    # Searching for CVE and MS categories
    cve = soup.findAll(text=re.compile(r'CVE-\d{4}-\d{4}'))
    if cve:
        CVE = " "
        for idx in cve:
            CVE += idx
            CVE += " "
            CVE = CVE.replace(',', ' ')
            CVE = CVE.replace('\n', '')

    ms = soup.findAll(text=re.compile(r'MS\d{2}-\d{3}'))
    if ms:
        MS = " "
        for im in ms:
            MS += im
            MS += " "
            MS = MS.replace(',', ' ')
            MS = MS.replace('\n', '')

    # Populating the final variable (a tuple with all the fields scraped)
    row = (vendor, rating_vendor, success, name, describe, CVE, MS, category, views, reviews, rating_item, addDate,
           BTC, USD, EURO, sold, left, shipFrom, shipTo, image, vendor_image)

    # Sending the results
    return row
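

# A minimal usage sketch (an addition, not part of the original scraper flow): it assumes a
# locally saved copy of a GoFish description page; the path below is a hypothetical placeholder.
def _demo_description_parser(path='samples/gofish_description.html'):
    with open(path, 'r', encoding='utf-8') as f:
        soup = BeautifulSoup(f.read(), 'html.parser')
    row = gofish_description_parser(soup)
    # tuple positions in 'row': 0 = vendor, 3 = product name, 13 = USD price
    print(row[0], row[3], row[13])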


# Parses listing pages: takes the soup object of a listing page's HTML and extracts the info
# it needs into parallel lists, which are organized and returned together.
# @param: soup object looking at the HTML page of a listing page
# return: 'row' that contains a variety of lists, each holding one field's info for every product on the listing page
def gofish_listing_parser(soup):
    # Fields to be parsed
    nm = 0              # *Total_Products (Should be Integer)
    mktName = "GoFish"  # 0 *Marketplace_Name
    vendor = []         # 1 *Vendor y
    rating_vendor = []  # 2 Vendor_Rating
    success = []        # 3 Vendor_Successful_Transactions
    name = []           # 4 *Product_Name y
    CVE = []            # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures) - don't worry about this
    MS = []             # 6 Product_MS_Classification (Microsoft Security) - don't worry about this
    category = []       # 7 Product_Category y
    describe = []       # 8 Product_Description
    views = []          # 9 Product_Number_Of_Views
    reviews = []        # 10 Product_Number_Of_Reviews
    rating_item = []    # 11 Product_Rating
    addDate = []        # 12 Product_AddDate
    BTC = []            # 13 Product_BTC_SellingPrice
    USD = []            # 14 Product_USD_SellingPrice y
    EURO = []           # 15 Product_EURO_SellingPrice
    sold = []           # 16 Product_QuantitySold
    qLeft = []          # 17 Product_QuantityLeft
    shipFrom = []       # 18 Product_ShippedFrom
    shipTo = []         # 19 Product_ShippedTo
    image = []          # 20 Product_Image
    image_vendor = []   # 21 Vendor_Image
    href = []           # 22 Product_Links

    listing = soup.find('tbody', {"class": "border border-2 align-middle"}).findAll('tr')

    # Populating the Number of Products
    nm = len(listing)

    for a in listing:
        bae = a.findAll('a', href=True)

        # Adding the url to the list of urls
        link = bae[0].get('href')
        href.append(link)

        # Finding the Product name
        product = bae[1].text
        product = product.replace('\n', ' ')
        product = product.replace(",", "")
        product = product.replace("...", "")
        product = product.strip()
        name.append(product)

        # Finding the Product Image
        product_image = bae[0].find('img')
        product_image = product_image.get('src')
        product_image = product_image.split('base64,')[-1]
        image.append(product_image)

        # Finding Prices
        price = a.find('span', {"class": "fw-bold text-nowrap"}).text
        price = price.replace("$", "")
        price = price.strip()
        USD.append(price)

        # Finding the Vendor
        vendor_name = bae[-1].text
        vendor_name = vendor_name.replace(",", "")
        vendor_name = vendor_name.strip()
        vendor.append(vendor_name)

        image_vendor.append("-1")

        # Finding the Category
        # cat = lb[-1].find("span").text
        # cat = cat.replace("class:", "")
        # cat = cat.strip()
        # category.append(cat)

        # span = lb[0].findAll("span")

        # Finding Number of Views
        # num = span[0].text
        # num = num.replace("views:", "")
        # num = num.strip()
        # views.append(num)

        # Finding Number Sold
        # num = span[2].text
        # num = num.replace("Sold:", "")
        # num = num.strip()
        # sold.append(num)

        # Finding Quantity Left
        # quant = span[1].text
        # quant = quant.replace("stock:", "")
        # quant = quant.strip()
        # qLeft.append(quant)

        # Adding shipping information
        # ship = lb[2].findAll('small')[1].findAll('span')[1].text.split("->")
        # shipFrom.append(ship[0].replace("Ship from ", "").strip())
        # shipTo.append(ship[1].replace("to ", "").strip())

        # Searching for CVE and MS categories
        cve = a.findAll(text=re.compile(r'CVE-\d{4}-\d{4}'))
        if not cve:
            cveValue = "-1"
        else:
            cee = " "
            for idx in cve:
                cee += idx
                cee += " "
                cee = cee.replace(',', ' ')
                cee = cee.replace('\n', '')
            cveValue = cee
        CVE.append(cveValue)

        ms = a.findAll(text=re.compile(r'MS\d{2}-\d{3}'))
        if not ms:
            MSValue = "-1"
        else:
            me = " "
            for im in ms:
                me += im
                me += " "
                me = me.replace(',', ' ')
                me = me.replace('\n', '')
            MSValue = me
        MS.append(MSValue)

    # Populate the final variable (this should be a list with all fields scraped)
    return organizeProducts(mktName, nm, vendor, rating_vendor, success, name, CVE, MS, category, describe, views,
                            reviews, rating_item, addDate, BTC, USD, EURO, sold, qLeft, shipFrom, shipTo, href,
                            image, image_vendor)
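

# A minimal usage sketch (an addition, not part of the original scraper flow): it assumes a
# locally saved copy of a GoFish listing page; the path below is a hypothetical placeholder.
# organizeProducts is the helper star-imported from MarketPlaces.Utilities.utilities above.
def _demo_listing_parser(path='samples/gofish_listing.html'):
    with open(path, 'r', encoding='utf-8') as f:
        soup = BeautifulSoup(f.read(), 'html.parser')
    # returns the per-product rows as organized by organizeProducts
    return gofish_listing_parser(soup)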


# Called by the crawler to get description links on a listing page
# @param: soup object looking at the HTML page of a listing page
# return: list of description links from a listing page
def gofish_links_parser(soup):
    # Returning all links that should be visited by the Crawler
    href = []

    listing = soup.find('tbody', {'class': 'border border-2 align-middle'})
    # search for rows within the tbody found above (the original searched the whole page and
    # skipped the first row to drop the table header, which lives outside the tbody)
    listing = listing.findAll('tr')

    # for a in listing:
    #     bae = a.find('a', {"class": "text-info"}, href=True)
    #     link = bae['href']
    #     href.append(link)

    for a in listing:
        bae = a.findAll('a', href=True)

        # Adding the url to the list of urls
        link = bae[0].get('href')
        href.append(link)

    return href
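

# A minimal usage sketch (an addition, not part of the original scraper flow): it assumes a
# locally saved copy of a GoFish listing page; the path below is a hypothetical placeholder.
def _demo_links_parser(path='samples/gofish_listing.html'):
    with open(path, 'r', encoding='utf-8') as f:
        soup = BeautifulSoup(f.read(), 'html.parser')
    for link in gofish_links_parser(soup):
        # each link is the (possibly relative) href of one product description page
        print(link)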