diff --git a/MarketPlaces/DB_Connection/db_connection.py b/MarketPlaces/DB_Connection/db_connection.py
index 403d59a..ab8eb6c 100644
--- a/MarketPlaces/DB_Connection/db_connection.py
+++ b/MarketPlaces/DB_Connection/db_connection.py
@@ -5,6 +5,7 @@ import traceback
 import configparser
 from MarketPlaces.Utilities.utilities import *
 from dateutil.relativedelta import relativedelta, FR
+from scipy.spatial import distance
 
 
 def connectDataBase():
@@ -88,6 +89,27 @@ def verifyItem(cur, marketId, hrefItem):
         trace = traceback.format_exc()
         print (trace)
 
+def verifyImage(cur, base64Image):
+
+    try:
+
+        cur.execute("lock table market_images IN ACCESS EXCLUSIVE MODE")
+
+        cur.execute("select image_id from market_images where hash_image = %(hashImage)s limit 1",
+                    {'hashImage': generate_image_hash(decode_decrypt_image_in_base64(base64Image))})
+
+        recset = cur.fetchall()
+
+        if recset:
+            return recset[0]['image_id']
+        else:
+            return 0
+
+    except:
+
+        trace = traceback.format_exc()
+        print (trace)
+
 def getLastMarketPlace(cur):
 
     try:
@@ -209,17 +231,19 @@ def create_vendor(cur, row, marketId):
 
     if newVendor:
 
-        sql = "Insert into vendors (vendor_id, market_id, name_vendor, rating_vendor, successfultransactions_vendor, image_vendor, dateinserted_vendor) " \
-              "Values (%s, %s, %s, %s, %s, %s, %s)"
+        imageId = create_image(cur, row[21], row[23])
 
-        recset = [vendorId, marketId,
-                  row[1],
-                  row[2] if row[2] != '-1' else None,
-                  row[3] if row[3] != '-1' else None,
-                  row[21] if row[21] != '-1' else None,
-                  row[23]]
+        sql = "Insert into vendors (vendor_id, market_id, name_vendor, rating_vendor, successfultransactions_vendor, image_vendor, dateinserted_vendor) " \
+              "Values (%s, %s, %s, %s, %s, %s, %s)"
 
-        cur.execute(sql, recset)
+        recset = [vendorId, marketId,
+                  row[1],
+                  row[2] if row[2] != '-1' else None,
+                  row[3] if row[3] != '-1' else None,
+                  imageId,
+                  row[23]]
+
+        cur.execute(sql, recset)
 
     else:
 
@@ -229,11 +253,13 @@ def create_vendor(cur, row, marketId):
 
         recset = cur.fetchall()
 
-        # decode_decrypt_image_in_base64(recset[0][5])
+        imageId = recset[0]['image_vendor']
+        if not is_same_image(cur, row[21], recset[0]['image_vendor']):
+            imageId = create_image(cur, row[21], row[23])
 
         if (str(recset[0]['rating_vendor']) != str(row[2] if row[2] != '-1' else None) or  # there was a change in the vendor information
             str(recset[0]['successfultransactions_vendor']) != str(row[3] if row[3] != '-1' else None) or
-            str(recset[0]['image_vendor']) != str(row[21] if row[21] != '-1' else None)):
+            str(recset[0]['image_vendor']) != str(imageId)):
 
             vendorVersionId = int(getLastVendorVersion(cur, vendorId) + 1)
 
@@ -253,10 +279,10 @@ def create_vendor(cur, row, marketId):
             sql = "Update vendors set rating_vendor = %(rating_vendor)s, successfultransactions_vendor = %(successfultransactions_vendor)s, " \
                   "image_vendor = %(image_vendor)s, dateinserted_vendor = %(dateinserted_vendor)s where vendor_id = %(vendorId)s"
             cur.execute(sql, {'rating_vendor': row[2] if row[2] != '-1' else None,
-                              'successfultransactions_vendor': row[3] if row[3] != '-1' else None,
-                              'image_vendor': row[21] if row[21] != '-1' else None,
-                              'dateinserted_vendor': row[23],
-                              'vendorId': vendorId})
+                              'successfultransactions_vendor': row[3] if row[3] != '-1' else None,
+                              'image_vendor': imageId,
+                              'dateinserted_vendor': row[23],
+                              'vendorId': vendorId})
 
     return vendorId
 
@@ -275,7 +301,7 @@ def create_items(cur, row, marketId, vendorId):
 
     if newItem:
 
-        # decode_decrypt_image_in_base64(row[20])
+        imageId = create_image(cur, row[20], row[23])
 
         sql = "Insert into items (item_id, market_id, vendor_id, name_item, description_item, cve_item, ms_item, category_item, " \
               "views_item, reviews_item, rating_item, dateadded_item, btc_item, usd_item, euro_item, quantitysold_item, " \
@@ -300,7 +326,7 @@ def create_items(cur, row, marketId, vendorId):
                   row[17] if row[17] != '-1' else None,
                   row[18] if row[18] != '-1' else None,
                   row[19] if row[19] != '-1' else None,
-                  row[20] if row[20] != '-1' else None,
+                  imageId,
                   hrefItem,
                   row[23],
                   row[23],
@@ -316,7 +342,9 @@ def create_items(cur, row, marketId, vendorId):
 
         recset = cur.fetchall()
 
-        # decode_decrypt_image_in_base64(recset[0]['image_item'])
+        imageId = recset[0]['image_item']
+        if not is_same_image(cur, row[20], recset[0]['image_item']):
+            imageId = create_image(cur, row[20], row[23])
 
         if (str(recset[0]['vendor_id']) != str(vendorId) or
             str(recset[0]['name_item']) != str(row[4] if row[4] != '-1' else None) or
@@ -335,7 +363,7 @@ def create_items(cur, row, marketId, vendorId):
             str(recset[0]['quantityleft_item']) != str(row[17] if row[17] != '-1' else None) or
             str(recset[0]['shippedfrom_item']) != str(row[18] if row[18] != '-1' else None) or
             str(recset[0]['shippedto_item']) != str(row[19] if row[19] != '-1' else None) or
-            str(recset[0]['image_item']) != str(row[20] if row[20] != '-1' else None) or
+            str(recset[0]['image_item']) != str(imageId) or
             str(recset[0]['classification_item']) != str(row[24] if row[24] != '-1' else None)):
 
             itemVersionId = int(getLastItemVersion(cur, itemId) + 1)
 
@@ -398,7 +426,7 @@ def create_items(cur, row, marketId, vendorId):
                               'quantityleft_item': row[17] if row[17] != '-1' else None,
                               'shippedfrom_item': row[18] if row[18] != '-1' else None,
                               'shippedto_item': row[19] if row[19] != '-1' else None,
-                              'image_item': row[20] if row[20] != '-1' else None,
+                              'image_item': imageId,
                               'lastseen_item': row[23],
                               'dateinserted_item': row[23],
                               'classification_item': row[24] if row[24] != '-1' else None,
@@ -413,6 +441,74 @@ def create_items(cur, row, marketId, vendorId):
 
     return itemId
 
+def is_same_image(cur, newBase64Image, oldImageId):
+
+    if newBase64Image == "-1" and oldImageId is None:
+        return True
+
+    if newBase64Image == "-1":  # and oldImageId is not None
+        return False
+
+    if oldImageId is None:      # and newBase64Image != "-1"
+        return False
+
+    sql = "select * from market_images where image_id = %(imageId)s"
+    cur.execute(sql, {'imageId': oldImageId})
+
+    recset = cur.fetchall()
+
+    decImage = decode_decrypt_image_in_base64(newBase64Image)
+
+    hash1 = generate_image_hash(decImage)
+    hash2 = recset[0]['hash_image']
+
+    if hash1 == hash2:
+        return True
+
+    features1 = json.loads(extract_hidden_layer_output(decImage))
+    features2 = json.loads(recset[0]['resnet50_image'])
+
+    similarity = 1 - distance.cosine(features1, features2)
+
+    return similarity >= 0.8
+
+def create_image(cur, base64Image, date):
+
+    if base64Image == "-1":
+        return None
+
+    imageId = verifyImage(cur, base64Image)
+
+    if not imageId:
+        imageId = int(getLastImage(cur) + 1)
+        newImage = True
+    else:
+        newImage = False
+
+    if newImage:
+
+        decImage = decode_decrypt_image_in_base64(base64Image)
+
+        hashImage = generate_image_hash(decImage)
+        hloFeatures = extract_hidden_layer_output(decImage)
+        keypoints, descriptors = extract_keypoints(decImage)
+
+        sql = "Insert into market_images(image_id, hash_image, base64_image, " \
+              "resnet50_image, siftkey_image, siftdesc_image, dateinserted_image) " \
+              "Values (%s, %s, %s, %s, %s, %s, %s)"
+
+        recset = [imageId,
+                  hashImage,
+                  base64Image,
+                  hloFeatures,
+                  keypoints,
+                  descriptors,
+                  date]
+
+        cur.execute(sql, recset)
+
+    return imageId
+
 def create_status(cur, marketId, date, listings, descriptions, status):
 
     date = datetime.strptime(date, "%m%d%Y")
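Note: create_image above calls getLastImage, which is not defined anywhere in this diff. If db_connection.py does not already provide it, a minimal sketch in the style of the existing getLast* helpers (e.g. getLastMarketPlace) might look like the following; the query and error handling are assumptions, not code from the repository:

def getLastImage(cur):

    try:

        # highest image_id assigned so far; 0 when market_images is still empty
        cur.execute("select image_id from market_images order by image_id desc limit 1")

        recset = cur.fetchall()

        if recset:
            return recset[0]['image_id']
        else:
            return 0

    except:

        trace = traceback.format_exc()
        print (trace)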
@@ -434,10 +530,20 @@ def create_status(cur, marketId, date, listings, descriptions, status):
 
     recset = [marketId, date, listings, descriptions, status, date_reference]
 
     cur.execute(sql, recset)
 
+
 def create_database(cur, con):
 
     try:
 
+        sql = "create table market_images(image_id integer not null, hash_image character varying(64) not null, base64_image character varying(10000000) not null, " \
+              "resnet50_image character varying(1000000) null, siftkey_image character varying(1000000) null, siftdesc_image character varying(1000000) null, " \
+              "dateinserted_image timestamp(6) with time zone not null, " \
+              "constraint market_images_pk primary key (image_id))"
+        cur.execute(sql)
+
+        sql = "create unique index unique_image ON market_images USING btree (hash_image ASC NULLS LAST)"
+        cur.execute(sql)
+
         sql = "create table marketplaces(market_id integer not null, name_market character varying(255) not null, " \
               "url_market character varying(255) not null, dateinserted_market timestamp(6) with time zone not null, " \
               "constraint markets_pk primary key (market_id))"
@@ -456,7 +562,8 @@ def create_database(cur, con):
               "varying(255) not null, rating_vendor character varying(255), successfultransactions_vendor integer " \
-              "null, image_vendor character varying(10000000) null, dateinserted_vendor timestamp(6) with time zone not null, " \
+              "null, image_vendor integer null, dateinserted_vendor timestamp(6) with time zone not null, " \
               "constraint vendors_pk primary key (vendor_id), " \
-              "constraint vendors_market_id_fk foreign key (market_id) references marketplaces (market_id))"
+              "constraint vendors_market_id_fk foreign key (market_id) references marketplaces (market_id), " \
+              "constraint vendors_image_id_fkey foreign key (image_vendor) references market_images (image_id))"
         cur.execute(sql)
 
         sql = "create unique index unique_vendor ON vendors USING btree (market_id ASC NULLS LAST, name_vendor ASC NULLS LAST)"
         cur.execute(sql)
 
@@ -465,9 +572,10 @@ def create_database(cur, con):
         sql = "create table vendors_history(vendor_id integer not null, version_vendor integer not null, market_id integer not null, name_vendor " \
               "character varying(255) not null, rating_vendor character varying(255), successfultransactions_vendor " \
-              "integer null, image_vendor character varying(10000000) null, dateinserted_vendor timestamp(6) with time zone not null, " \
-              "constraint vendors_history_pk primary key (vendor_id, version_vendor), constraint vendors_history_vendor_id_fkey foreign key (" \
-              "vendor_id) references vendors (vendor_id), constraint vendors_history_market_id_fkey foreign key (" \
-              "market_id) references marketplaces (market_id))"
+              "integer null, image_vendor integer null, dateinserted_vendor timestamp(6) with time zone not null, " \
+              "constraint vendors_history_pk primary key (vendor_id, version_vendor), " \
+              "constraint vendors_history_vendor_id_fkey foreign key (vendor_id) references vendors (vendor_id), " \
+              "constraint vendors_history_market_id_fkey foreign key (market_id) references marketplaces (market_id), " \
+              "constraint vendors_history_image_id_fkey foreign key (image_vendor) references market_images (image_id))"
         cur.execute(sql)
 
         sql = "create table items(item_id integer not null, market_id integer not null, vendor_id integer not null, name_item character " \
              "varying(255) not null, description_item character varying(1000000) null, cve_item character varying(255) null, ms_item " \
              "character varying(255) null, category_item character varying(255) null, views_item integer null, reviews_item integer null, rating_item " \
              "character varying(25) null, btc_item character varying(255) null, usd_item character varying(255) " \
              "null, euro_item character varying(255) null, quantitysold_item integer null, quantityleft_item " \
              "character varying(255) null, shippedfrom_item character varying(255) null, shippedto_item character " \
-              "varying(255) null, image_item character varying(10000000) null, href_item character varying(255) not null, " \
+              "varying(255) null, image_item integer null, href_item character varying(255) not null, " \
               "lastseen_item timestamp(6) with time zone not null, dateinserted_item timestamp(6) with time zone not null, " \
-              "classification_item double precision null, constraint items_pk primary key (item_id), constraint " \
-              "items_market_id_fkey foreign key (market_id) references marketplaces (market_id),constraint " \
-              "items_vendor_id_fkey foreign key (vendor_id) references vendors (vendor_id))"
+              "classification_item double precision null, " \
+              "constraint items_pk primary key (item_id), " \
+              "constraint items_market_id_fkey foreign key (market_id) references marketplaces (market_id), " \
+              "constraint items_vendor_id_fkey foreign key (vendor_id) references vendors (vendor_id), " \
+              "constraint items_image_id_fkey foreign key (image_item) references market_images (image_id))"
         cur.execute(sql)
 
         sql = "create unique index unique_item ON items USING btree (market_id ASC NULLS LAST, href_item ASC NULLS LAST)"
@@ -494,12 +604,14 @@ def create_database(cur, con):
              "character varying(25) null, btc_item character varying(255) null, usd_item character varying(255) " \
              "null, euro_item character varying(255) null, quantitysold_item integer null, quantityleft_item " \
              "character varying(255) null, shippedfrom_item character varying(255) null, shippedto_item character " \
-              "varying(255) null, image_item character varying(10000000) null, href_item character varying(255) not null, " \
+              "varying(255) null, image_item integer null, href_item character varying(255) not null, " \
               "lastseen_item timestamp(6) with time zone not null, dateinserted_item timestamp(6) with time zone not null, " \
-              "classification_item double precision null, constraint items_history_pk primary key (item_id, version_item), " \
+              "classification_item double precision null, " \
+              "constraint items_history_pk primary key (item_id, version_item), " \
+              "constraint items_history_item_id_fkey foreign key (item_id) references items (item_id), " \
               "constraint items_history_market_id_fkey foreign key (market_id) references marketplaces (market_id), " \
               "constraint items_history_vendor_id_fkey foreign key (vendor_id) references vendors (vendor_id), " \
-              "constraint items_history_item_id_fkey foreign key (item_id) references items (item_id))"
+              "constraint items_history_image_id_fkey foreign key (image_item) references market_images (image_id))"
         cur.execute(sql)
 
         con.commit()
 
@@ -513,4 +625,4 @@
         if (trace.find("already exists")==-1):
             print ("There was a problem during the database creation."
) traceback.print_exc() - raise SystemExit \ No newline at end of file + raise SystemExit diff --git a/MarketPlaces/Utilities/utilities.py b/MarketPlaces/Utilities/utilities.py index 9a04d1d..c698436 100644 --- a/MarketPlaces/Utilities/utilities.py +++ b/MarketPlaces/Utilities/utilities.py @@ -7,6 +7,13 @@ import hashlib import base64 import io import configparser +import json +import keras +import cv2 +import numpy as np +from keras.preprocessing import image +from keras.applications.imagenet_utils import preprocess_input +from keras.models import Model from datetime import datetime, timedelta from lxml import html as lxml from selenium.webdriver.common.by import By @@ -38,185 +45,88 @@ aes_key = generate_aes_key() encryptCipher = AES.new(aes_key, AES.MODE_ECB) decryptCipher = AES.new(aes_key, AES.MODE_ECB) +model = keras.applications.ResNet50(weights='imagenet', include_top=True) +feat_extractor = Model(inputs=model.input, outputs=model.get_layer('avg_pool').output) -def convertDate(sdate, language, crawlerDate): - - if language == "english": - - today = crawlerDate.strftime("%m/%d/%Y") - yesterday = (crawlerDate - timedelta(1)).strftime("%m/%d/%Y") - - sdate = sdate.replace(u"January","01") - sdate = sdate.replace(u"February","02") - sdate = sdate.replace(u"March","03") - sdate = sdate.replace(u"April","04") - sdate = sdate.replace(u"May","05") - sdate = sdate.replace(u"June","06") - sdate = sdate.replace(u"July","07") - sdate = sdate.replace(u"August","08") - sdate = sdate.replace(u"September","09") - sdate = sdate.replace(u"October","10") - sdate = sdate.replace(u"November","11") - sdate = sdate.replace(u"December","12") - sdate = sdate.replace(u"Jan","01") - sdate = sdate.replace(u"Feb","02") - sdate = sdate.replace(u"Mar","03") - sdate = sdate.replace(u"Apr","04") - sdate = sdate.replace(u"May","05") - sdate = sdate.replace(u"Jun","06") - sdate = sdate.replace(u"Jul","07") - sdate = sdate.replace(u"Aug","08") - sdate = sdate.replace(u"Sep","09") - sdate = sdate.replace(u"Oct","10") - sdate = sdate.replace(u"Nov","11") - sdate = sdate.replace(u"Dec","12") - sdate = sdate.replace(u".","") - - if "Today" in sdate: - sdate = datetime.strptime(str(today), '%m/%d/%Y').strftime('%m %d %Y') - elif "Yesterday" in sdate: - sdate = datetime.strptime(str(yesterday), '%m/%d/%Y').strftime('%m %d %Y') - - sdate = datetime.strptime(str(sdate), '%m %d %Y').strftime('%m/%d/%Y') - - elif language == "british": - - sdate = datetime.strptime(str(sdate), '%d %m %Y').strftime('%m/%d/%Y') - - elif language == "french": - - todaysday = crawlerDate.strftime("%m/%d/%Y") - - sdate = sdate.replace(u"janvier","01") - sdate = sdate.replace(u"jan","01") - sdate = sdate.replace(u"février","02") - sdate = sdate.replace(u"juin","06") - sdate = sdate.replace(u"juillet","07") - sdate = sdate.replace(u"juil","07") - sdate = sdate.replace(u"août","08") - sdate = sdate.replace(u"septembre","09") - sdate = sdate.replace(u"sept","09") - sdate = sdate.replace(u"octobre","10") - sdate = sdate.replace(u"oct","10") - sdate = sdate.replace(u"novembre","11") - sdate = sdate.replace(u"nov","11") - sdate = sdate.replace(u"décembre","12") - sdate = sdate.replace(u"déc","12") - sdate = sdate.replace(u".","") - - if sdate == u"Aujourd'hui" or "Today" in sdate: - sdate = datetime.strptime(str(todaysday), '%m/%d/%Y').strftime('%d %m %Y') - - if "mar" in sdate: - print ("Add March to the IBM Black Market") - raise SystemExit - elif "avr" in sdate: - print ("Add April to the IBM Black Market") - raise SystemExit - elif "mai" in sdate: - 
print ("Add May to the IBM Black Market") - raise SystemExit - - sdate = datetime.strptime(str(sdate), '%d %m %Y').strftime('%m/%d/%Y') - - elif language == "swedish": - - sdate = sdate.replace(u"jan","01") - sdate = sdate.replace(u"feb","02") - sdate = sdate.replace(u"mar","03") - sdate = sdate.replace(u"apr","04") - sdate = sdate.replace(u"maj","05") - sdate = sdate.replace(u"jun","06") - sdate = sdate.replace(u"jul","07") - sdate = sdate.replace(u"aug","08") - sdate = sdate.replace(u"sep","09") - sdate = sdate.replace(u"okt","10") - sdate = sdate.replace(u"nov","11") - sdate = sdate.replace(u"dec","12") - sdate = sdate.replace(u".","") - - if sdate == u"Ig\xe5r" or sdate == u"Idag" or "minuter sedan" in sdate: - sdate = crawlerDate - sdate = datetime.strptime(str(sdate), '%Y-%m-%d').strftime('%d %m %Y') - - sdate = datetime.strptime(str(sdate), '%d %m %Y').strftime('%m/%d/%Y') - - elif language == "russian": - - if sdate == u'\u0412\u0447\u0435\u0440\u0430' or u"Вчера" in sdate: - sdate = crawlerDate - timedelta(1) - sdate = datetime.strptime(str(sdate), '%Y-%m-%d').strftime('%d %m %Y') - elif sdate == u'\u0421\u0435\u0433\u043e\u0434\u043d\u044f': - sdate = crawlerDate - sdate = datetime.strptime(str(sdate), '%Y-%m-%d').strftime('%d %m %Y') - elif u'\xd1\xee\xe7\xe4\xe0\xed\xee' in sdate: - return "" - - sdate = sdate.replace(u"Январь","01") - sdate = sdate.replace(u"января","01") - sdate = sdate.replace(u"янв","01") - sdate = sdate.replace(u"January","01") - sdate = sdate.replace(u"Jan","01") - sdate = sdate.replace(u"фев","02") - sdate = sdate.replace(u"февраля","02") - sdate = sdate.replace(u"Февраль", "02") - sdate = sdate.replace(u"February", "02") - sdate = sdate.replace(u"Feb", "02") - sdate = sdate.replace(u"Март","03") - sdate = sdate.replace(u"марта","03") - sdate = sdate.replace(u"March","03") - sdate = sdate.replace(u"Mar","03") - sdate = sdate.replace(u"Апрель","04") - sdate = sdate.replace(u"апреля","04") - sdate = sdate.replace(u"апр","04") - sdate = sdate.replace(u"April","04") - sdate = sdate.replace(u"Apr","04") - sdate = sdate.replace(u"май","05") - sdate = sdate.replace(u"Май","05") - sdate = sdate.replace(u"мар","05") - sdate = sdate.replace(u"май","05") - sdate = sdate.replace(u"мая","05") - sdate = sdate.replace(u"May","05") - sdate = sdate.replace(u"Июнь","06") - sdate = sdate.replace(u"июня","06") - sdate = sdate.replace(u"июн","06") - sdate = sdate.replace(u"June","06") - sdate = sdate.replace(u"Jun","06") - sdate = sdate.replace(u"Июль","07") - sdate = sdate.replace(u"июля","07") - sdate = sdate.replace(u"июл","07") - sdate = sdate.replace(u"July","07") - sdate = sdate.replace(u"Jul","07") - sdate = sdate.replace(u"августа","08") - sdate = sdate.replace(u"Август","08") - sdate = sdate.replace(u"авг","08") - sdate = sdate.replace(u"August","08") - sdate = sdate.replace(u"Aug","08") - sdate = sdate.replace(u"Сентябрь","09") - sdate = sdate.replace(u"сентября","09") - sdate = sdate.replace(u"сен","09") - sdate = sdate.replace(u"September","09") - sdate = sdate.replace(u"Sep","09") - sdate = sdate.replace(u"октября","10") - sdate = sdate.replace(u"Октябрь","10") - sdate = sdate.replace(u"October","10") - sdate = sdate.replace(u"Oct","10") - sdate = sdate.replace(u"окт","10") - sdate = sdate.replace(u"Ноябрь","11") - sdate = sdate.replace(u"ноября","11") - sdate = sdate.replace(u"ноя","11") - sdate = sdate.replace(u"November","11") - sdate = sdate.replace(u"Nov","11") - sdate = sdate.replace(u"Декабрь","12") - sdate = sdate.replace(u"декабря","12") - sdate = 
sdate.replace(u"дек","12") - sdate = sdate.replace(u"December","12") - sdate = sdate.replace(u"Dec","12") - sdate = sdate.replace(u".","") - - sdate = datetime.strptime(str(sdate), '%d %m %Y').strftime('%m/%d/%Y') - - return sdate +sift = cv2.SIFT_create( + nfeatures=0, # Number of features, 0 for unlimited + nOctaveLayers=3, # Number of layers per octave + contrastThreshold=0.09, # Contrast threshold + edgeThreshold=10, # Edge threshold + sigma=1.6 # Initial Gaussian blur sigma +) + + +def generate_image_hash(image_string): + + image_bytes = bytes(image_string, encoding='utf-8') + image_bytes = base64.b64decode(image_bytes) + + return hashlib.sha256(image_bytes).hexdigest() + + +def extract_hidden_layer_output(image_string): + + image_bytes = bytes(image_string, encoding='utf-8') + image_bytes = base64.b64decode(image_bytes) + im = Image.open(io.BytesIO(image_bytes)).convert('RGB') + + x = image.img_to_array(im) + x = image.smart_resize(x, size=model.input_shape[1:3], interpolation='nearest') + x = np.expand_dims(x, axis=0) + x = preprocess_input(x) + + return json.dumps(feat_extractor.predict(x)[0].tolist()) + + +def extract_keypoints(image_string): + + image_bytes = bytes(image_string, encoding='utf-8') + image_bytes = base64.b64decode(image_bytes) + image_array = np.asarray(bytearray(image_bytes), dtype=np.uint8) + + img = cv2.imdecode(image_array, cv2.IMREAD_GRAYSCALE) + + keypoints, descriptors = sift.detectAndCompute(img, None) + + return json.dumps(wrap_keypoints(keypoints)), json.dumps(descriptors.tolist()) + + +def wrap_keypoints(keypoints): + + keypoints_list = [] + + for i in range(len(keypoints)): + temp = { + 'pt': keypoints[i].pt, + 'size': keypoints[i].size, + 'angle': keypoints[i].angle, + 'octave': keypoints[i].octave, + 'response': keypoints[i].response, + 'class_id': keypoints[i].class_id + } + keypoints_list.append(temp) + + return keypoints_list + + +def unwrap_keypoints(keypoints_list): + + keypoints = [] + + for temp in keypoints_list: + point = cv2.KeyPoint( + x=temp['pt'][0], + y=temp['pt'][1], + size=temp['size'], + angle=temp['angle'], + octave=temp['octave'], + response=temp['response'], + class_id=temp['class_id'] + ) + keypoints.append(point) + + return tuple(keypoints) def cleanText(originalText): @@ -330,20 +240,16 @@ def cleanNumbers(inputString): return updated_string -def aes_encryption(item): - - to_bytes = bytes(item) +def aes_encryption(data_bytes): - encrypted_bytes = encryptCipher.encrypt(pad(to_bytes, BLOCK_SIZE)) + encrypted_bytes = encryptCipher.encrypt(pad(data_bytes, BLOCK_SIZE)) return encrypted_bytes -def aes_decryption(item): - - to_bytes = bytes(item) +def aes_decryption(data_bytes): - decrypted_bytes = decryptCipher.decrypt(to_bytes) + decrypted_bytes = decryptCipher.decrypt(data_bytes) return unpad(decrypted_bytes, BLOCK_SIZE) @@ -357,28 +263,28 @@ def encrypt_encode_image_to_base64(driver, xpath): encrypted_image = aes_encryption(image_data) base64_image = base64.b64encode(encrypted_image) - string_image = base64_image.decode('utf-8') + enc_image_string = base64_image.decode('utf-8') - return string_image + return enc_image_string - except: + except Exception as e: + print(e) pass return None -def decode_decrypt_image_in_base64(string_image): +def decode_decrypt_image_in_base64(image_string): try: - base64_image = bytes(string_image, encoding='utf-8') - encrypted_image = base64.b64decode(base64_image) - decrypted_image = aes_decryption(encrypted_image) - - im = Image.open(io.BytesIO(decrypted_image)) - im.show() + image_bytes = 
bytes(image_string, encoding='utf-8') + encrypted_bytes = base64.b64decode(image_bytes) + decrypted_image = aes_decryption(encrypted_bytes) + base64_image = base64.b64encode(decrypted_image) + dec_image_string = base64_image.decode('utf-8') - return decrypted_image + return dec_image_string except Exception as e: print(e)
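Note: the diff persists ResNet50 features and SIFT keypoints/descriptors as JSON (resnet50_image, siftkey_image, siftdesc_image), but nothing here reads the SIFT data back; unwrap_keypoints suggests it is intended for later matching. Purely as an illustration, and with the function name, matcher choice and thresholds being assumptions rather than part of the codebase, the stored descriptors could be compared with OpenCV's brute-force matcher and Lowe's ratio test:

import json

import cv2
import numpy as np


def sift_match_ratio(descriptors_json_a, descriptors_json_b, ratio=0.75):

    # rehydrate the json.dumps(descriptors.tolist()) strings written by extract_keypoints
    desc_a = np.asarray(json.loads(descriptors_json_a), dtype=np.float32)
    desc_b = np.asarray(json.loads(descriptors_json_b), dtype=np.float32)

    if len(desc_a) == 0 or len(desc_b) == 0:
        return 0.0

    # brute-force matching with the L2 norm is the usual pairing for SIFT descriptors
    matcher = cv2.BFMatcher(cv2.NORM_L2)
    matches = matcher.knnMatch(desc_a, desc_b, k=2)

    # Lowe's ratio test: keep a match only if it is clearly better than the runner-up
    good = [pair[0] for pair in matches if len(pair) == 2 and pair[0].distance < ratio * pair[1].distance]

    return len(good) / max(len(desc_a), len(desc_b))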