__author__ = 'Helium'

# Here, we are importing the auxiliary functions to clean or convert data
from MarketPlaces.Utilities.utilities import *

# Here, we are importing BeautifulSoup to search through the HTML tree
from bs4 import BeautifulSoup


#parses description pages: takes the BeautifulSoup object of a description page's HTML
#and extracts the info it needs into individual fields, which are organized and returned together
#@param: soup object looking at html page of description page
#return: 'row' tuple that holds every field scraped from the description page
def m00nkey_description_parser(soup):

    # Fields to be parsed
    vendor = "-1"  # 0 *Vendor_Name
    rating_vendor = "-1"  # 1 Vendor_Rating
    success = "-1"  # 2 Vendor_Successful_Transactions
    name = "-1"  # 3 *Product_Name
    describe = "-1"  # 4 Product_Description
    CVE = "-1"  # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures) don't worry about that much
    MS = "-1"  # 6 Product_MS_Classification (Microsoft Security) don't worry about that much
    category = "-1"  # 7 Product_Category
    views = "-1"  # 8 Product_Number_Of_Views
    reviews = "-1"  # 9 Product_Number_Of_Reviews
    rating_item = "-1"  # 10 Product_Rating
    addDate = "-1"  # 11 Product_AddedDate
    BTC = "-1"  # 12 Product_BTC_SellingPrice
    USD = "-1"  # 13 Product_USD_SellingPrice
    EURO = "-1"  # 14 Product_EURO_SellingPrice
    sold = "-1"  # 15 Product_QuantitySold
    left = "-1"  # 16 Product_QuantityLeft
    shipFrom = "-1"  # 17 Product_ShippedFrom
    shipTo = "-1"  # 18 Product_ShippedTo

    #vendor name
    try:
        temp = soup.find('div', {'class': 'box rounded mb-0'}).find('a').text
        vendor = (cleanString(temp.strip()))
    except:
        vendor = "-1"

    # successful transactions
    try:
        temp = soup.findAll('div', {'class': 'text-center text-truncate column-flex ml-1 mr-1'})
        temp2 = temp[1].findAll('span', {'class': 'float-right font-weight-bold'})
        temp = temp2[1].text
        success = (temp.strip())
    except:
        print("successful transactions")


    # vendor rating
    try:
        temp = soup.findAll('div', {'class': 'text-center text-truncate column-flex ml-1 mr-1'})
        temp2 = temp[1].findAll('span', {'class': 'float-right font-weight-bold'})
        temp = temp2[5].text
        rating_vendor = (cleanString(temp.strip()))
    except:
        print("vendor rating")

    # product name
    try:
        temp = soup.find('h3', {'class': 'h3 rounded card-title'}).find('span').text
        name = (cleanString(temp.strip()))
    except:
        temp = soup.find('h3', {'class': 'h3 rounded card-title'}).find('span').find("div").text
        name = (cleanString(temp.strip()))

    # product description
    try:
        temp = soup.find('div', {'class': "box rounded flex-fill"}).find('pre').text
        temp = temp.replace("\n", " ").replace("\r", " ")
        describe = cleanString(temp.strip())
    except:
        print("product description")

    CVE = "-1"  # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures) don't worry about that much
    MS = "-1"  # 6 Product_MS_Classification (Microsoft Security) don't worry about that much

    # product category
    try:
        temp = soup.findAll('table', {'class': 'table table-hover'})
        temp2 = temp[1].find('tr').findAll('td')
        temp = temp2[1].text
        category = cleanString(temp.strip())
    except:
        temp = soup.find('table', {'class': 'table table-hover'})
        temp2 = temp.find('tbody').find('tr').findAll('td')
        temp = temp2[1].text
        category = cleanString(temp.strip())

    # product number of views
    try:
        temp = soup.find('div', {'class': 'box rounded mb-0'})
        temp2 = temp.findAll('i')
        temp = temp2[2].text
        views = cleanString((temp.strip()))
    except:
        print('product number of views')

    reviews = "-1"  # 9 Product_Number_Of_Reviews
    rating_item = "-1"  # 10 Product_Rating
    addDate = "-1"  # 11 Product_AddedDate

    # BTC selling price
    try:
        temp = soup.find('div', {'class': 'box box-rounded mt-2'})
        temp2 = temp.findAll('i', {'class': 'float-right color-prices'})
        temp = temp2[1].text
        BTC = cleanString((temp.strip()))
    except:
        try:
            temp = soup.find('div', {'class': 'box box-rounded'})
            temp2 = temp.findAll('span', {'class': 'float-right color-prices'})
            temp = temp2[1].text
            BTC = cleanString((temp.strip()))
        except:
            print("BTC")


    # USD selling price
    try:
        temp = soup.find('div', {'class': 'box box-rounded mt-2'})
        temp2 = temp.findAll('center')
        temp = temp2[1].find('i').text
        if "$" in temp:
            temp = temp.replace("$", "")
        USD = cleanString((temp.strip()))
    except:
        try:
            temp = soup.find('div', {'class': 'box box-rounded'})
            temp2 = temp.findAll('center')
            temp = temp2[1].find('span').text
            if "$" in temp:
                temp = temp.replace("$", "")
            USD = cleanString((temp.strip()))
        except:
            print("USD")

    EURO = "-1"  # 14 Product_EURO_SellingPrice


    # product sold
    try:
        temp = soup.find('div', {'class': 'box rounded mb-0'})
        temp2 = temp.find('i')
        temp = temp2.text
        sold = (cleanString(temp.strip()))
        # sold = "-1"
    except:
        print("product sold")

    # product quantity left
    try:
        temp = soup.findAll('table', {'class': 'table table-hover'})
        temp2 = temp[1].findAll('tr')
        temp3 = temp2[1].findAll('td')
        temp = temp3[1].text
        left = cleanString(temp.strip())
    except:
        temp = soup.find('table', {'class': 'table table-hover'})
        temp2 = temp.findAll('tr')
        temp3 = temp2[1].findAll('td')
        temp = temp3[1].text
        left = cleanString(temp.strip())


    shipFrom = "-1"  # 17 Product_ShippedFrom
    shipTo = "-1"  # 18 Product_ShippedTo

    # Populating the final variable (a tuple that contains every scraped field)
    row = (vendor, rating_vendor, success, name, describe, CVE, MS, category, views, reviews, rating_item, addDate,
           BTC, USD, EURO, sold, left, shipFrom, shipTo)

    # Sending the results
    return row
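
# Usage note: fields that cannot be located generally keep their "-1" defaults, so callers can
# treat "-1" as "not scraped"; the order of the returned tuple matches the numbered comments at
# the top of m00nkey_description_parser.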


#parses listing pages: takes the BeautifulSoup object of a listing page's HTML and extracts
#the info it needs into per-product lists, which are organized and returned together
#@param: soup object looking at html page of listing page
#return: 'row' that contains a variety of lists, each holding one field for every product on the listing page
def m00nkey_listing_parser(soup):
    # Fields to be parsed
    nm = 0  # *Total_Products (Should be Integer)
    mktName = "M00nkeyMarket"  # 0 *Marketplace_Name
    vendor = []  # 1 *Vendor y
    rating_vendor = []  # 2 Vendor_Rating
    success = []  # 3 Vendor_Successful_Transactions
    name = []  # 4 *Product_Name y
    CVE = []  # 5 Product_CVE_Classification (Common Vulnerabilities and Exposures) don't worry about this
    MS = []  # 6 Product_MS_Classification (Microsoft Security) don't worry about this
    category = []  # 7 Product_Category y
    describe = []  # 8 Product_Description
    views = []  # 9 Product_Number_Of_Views
    reviews = []  # 10 Product_Number_Of_Reviews
    rating_item = []  # 11 Product_Rating
    addDate = []  # 12 Product_AddDate
    BTC = []  # 13 Product_BTC_SellingPrice
    USD = []  # 14 Product_USD_SellingPrice y
    EURO = []  # 15 Product_EURO_SellingPrice
    sold = []  # 16 Product_QuantitySold
    qLeft = []  # 17 Product_QuantityLeft
    shipFrom = []  # 18 Product_ShippedFrom
    shipTo = []  # 19 Product_ShippedTo
    href = []  # 20 Product_Links

    listing = soup.findAll('div', {"class": "card mt-1"})

    # Populating the Number of Products
    nm = len(listing)

    for a in listing:

        # vendor
        try:
            temp = a.find('div', {'class': 'col-5 justify-content-between mx-auto'}).find('a').text
            vendor.append(cleanString(temp.strip()))
        except:
            print('vendor')

        # vendor rating is appended below with the other default fields

        # successful transactions are not scraped here (TODO: check again)
        success.append("-1")
        # product name
        try:
            temp = a.find('h5', {'class': 'card-title rounded text-truncate'}).find('a').text
            name.append(cleanString(temp.strip()))
        except:
            print('product name')


        CVE.append('-1')
        MS.append('-1')
        rating_vendor.append('-1')

        # product category (taken from the page-level "Search Results for:" header, so it is the same for every card)
        try:
            temp = soup.find('div', {'class': 'card-sidebar-menu box mb-2 flex-column'}).find('h3').find('span').text
            if "Search Results for: " in temp:
                temp = temp.replace("Search Results for: ", "")
            category.append(cleanString(temp.strip()))

        except:
            print("Error in product category")

        describe.append('-1')

        # product views
        try:
            temp = a.find('h6', {'class': 'card-subtitle mb-1 text-muted text-truncate'})
            temp2 = temp.find('i').text
            views.append(cleanString(temp2.strip()))
        except:
            print("Error in views")

        reviews.append('-1')  # 10 Product_Number_Of_Reviews
        rating_item.append('-1')  # 11 Product_Rating
        addDate.append('-1')  # 12 Product_AddDate

        # BTC
        try:
            temp = a.find('div', {'class': 'col-3 justify-content-between mx-auto'})
            temp2 = temp.findAll('p')
            temp = temp2[1].text
            BTC.append(cleanString(temp.strip()))
        except:
            print("BTC")

        # USD selling price (strip the leading '$')
        try:
            temp = a.find('div', {'class': 'col-12 justify-content-between mx-auto'}).find('i').text
            if '$' in temp:
                temp = temp.replace("$", "")
            USD.append(cleanString(temp.strip()))  # 14 Product_USD_SellingPrice
        except:
            print("USD")

        EURO.append("-1")  # 15 Product_EURO_SellingPrice

        #product sold
        try:
            temp = a.find('div', {'class': 'col-12 mx-auto text-truncate text-center flex-fill'}).findAll('p', {'class': 'card-text mb-0'})
            temp2 = temp[1].find('i').text
            sold.append(cleanString(temp2.strip()))
        except:
            print("product sold")

        qLeft.append('-1')  # 17 Product_QuantityLeft
        shipFrom.append('-1')  # 18 Product_ShippedFrom
        shipTo.append('-1')  # 19 Product_ShippedTo

        #href
        try:
            temp = a.find('h5', {'class': 'card-title rounded text-truncate'}).find('a').get('href')
            href.append(temp)  # 20 Product_Links
        except:
            print("href")


    # Populate the final variable (this should be a list with all fields scraped)
    return organizeProducts(mktName, nm, vendor, rating_vendor, success, name, CVE, MS, category, describe, views,
                            reviews, rating_item, addDate, BTC, USD, EURO, sold, qLeft, shipFrom, shipTo, href)

#called by the crawler to get description links on a listing page
#@param: soup object looking at html page of listing page
#return: list of description links from a listing page
def m00nkey_links_parser(soup):

    # Returning all links that should be visited by the Crawler

    href = []
    listing = soup.findAll('div', {"class": "card mt-1"})

    for a in listing:
        bae = a.find('a', href=True)  # the first link in each card points to the product's description page
        link = bae['href']
        href.append(link)

    return href
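

# A minimal, hedged usage sketch (not part of the crawler pipeline): it assumes the crawler has
# already saved a listing page and a description page to disk; the file paths below are
# hypothetical placeholders, not real paths used by this project.
if __name__ == '__main__':
    with open('m00nkey_listing_page.html', 'r', encoding='utf-8') as f:
        listing_soup = BeautifulSoup(f.read(), 'html.parser')

    # description-page links the crawler would visit next
    print(m00nkey_links_parser(listing_soup))

    # organized listing rows (one entry per product card)
    print(m00nkey_listing_parser(listing_soup))

    with open('m00nkey_description_page.html', 'r', encoding='utf-8') as f:
        description_soup = BeautifulSoup(f.read(), 'html.parser')

    # single row of fields scraped from the description page
    print(m00nkey_description_parser(description_soup))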