backblaze download/recover file support

LPOCHOLLE 2021-10-21 15:22:28 +02:00
parent 155dcf4855
commit 2f272f5486
3 changed files with 57 additions and 50 deletions

View File

@@ -7,11 +7,9 @@ import mgzip as gzip
 import tarfile
 import sqlite3
 import pathlib
-import hashlib
-import b2sdk.v2
 from b2sdk.v2 import B2Api
-from crypt import *
+from crypt import encrypt_file, decrypt_file
 class Backup:
@@ -38,7 +36,7 @@ class Backup:
             c_date = datetime.fromtimestamp(os.path.getctime(uri)).strftime("%Y-%m-%d %H:%M:%S.%f")
             if size > tarball_size:
                 crypt_name = self.bdd.add([{'name': f,
-                                            'path': uri,
+                                            'path': pathlib.Path(uri).as_posix(),
                                             'size': size,
                                             'm_date': m_date,
                                             'c_date': c_date}],
@ -47,11 +45,11 @@ class Backup:
print("Proceed", uri) print("Proceed", uri)
enc = crypt(compress(uri), self.key) enc = crypt(compress(uri), self.key)
print(" Size : ", get_size(enc)) print(" Size : ", get_size(enc))
upload_b2(self.buk, enc, crypt_name) self.buk.upload_bytes(enc.read(), crypt_name)
save(enc, os.path.join(self.save_location, crypt_name)) # save(enc, os.path.join(self.save_location, crypt_name))
else: else:
files.append({'name': f, files.append({'name': f,
'path': uri, 'path': pathlib.Path(uri).as_posix(),
'size': size, 'size': size,
'm_date': m_date, 'm_date': m_date,
'c_date': c_date}) 'c_date': c_date})
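Note on the change above: the hand-rolled `upload_b2` helper (removed further down) built a `b2sdk.v2.UploadSourceBytes` and a SHA-1 itself; `Bucket.upload_bytes` lets the SDK handle the checksum and only needs the payload as `bytes`, hence the `enc.read()` on a rewound temp file. A rough standalone sketch of that call, not taken from this diff: the account setup is not shown in the commit, the credentials are placeholders, and "00042" is a hypothetical object name standing in for the `crypt_name` returned by `self.bdd.add()`.

    # Sketch only (b2sdk v2): upload an in-memory encrypted blob by name.
    import tempfile
    from b2sdk.v2 import InMemoryAccountInfo, B2Api

    b2_api = B2Api(InMemoryAccountInfo())
    b2_api.authorize_account("production", "<application_key_id>", "<application_key>")
    buk = b2_api.get_bucket_by_id("<bucket_id>")

    enc = tempfile.SpooledTemporaryFile()   # stands in for crypt(compress(uri), self.key)
    enc.write(b"encrypted payload")
    enc.seek(0)                             # rewind before handing the whole blob to B2
    buk.upload_bytes(enc.read(), "00042")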
@@ -61,83 +59,77 @@ class Backup:
         crypt_name = self.bdd.add(files, compress_mode="tar.gz")
         if crypt_name is not None:
             print("Proceed", path, ":", [file['name'] for file in files])
-            tarball = tar(files)
+            tarball = tar([file['path'] for file in files])
             enc = crypt(compress(tarball), self.key)
             print(" Size : ", get_size(enc))
-            upload_b2(self.buk, enc, crypt_name)
-            save(enc, os.path.join(self.save_location, crypt_name))
+            self.buk.upload_bytes(enc.read(), crypt_name)
+            # save(enc, os.path.join(self.save_location, crypt_name))
+    def recover_file(self, paths, parents=False, save_path=os.getcwd()):
+        files = self.bdd.get_crypt_file(paths)
+        for file in files:
+            dl = tempfile.SpooledTemporaryFile()
+            self.buk.download_file_by_name(file['crypt']).save(dl)
+            if parents:
+                save_path = os.path.join(save_path, file['path'])
+            untar(uncompress(uncrypt(dl, self.key)), file['path'], save_path)
 def get_size(in_file):
     if type(in_file) is str:
         filesize = os.path.getsize(in_file)
-    elif type(in_file) is io.BufferedRandom or tempfile.SpooledTemporaryFile:
+    elif type(in_file) is tempfile.SpooledTemporaryFile:
         in_file.seek(0, 2)
         filesize = in_file.tell()
         in_file.seek(0)
     return human_size(filesize, decimal_places=1, unit='si')
-def upload_b2(buk, file, save_name, chunksize=64 * 1024):
-    if type(file) is io.BufferedRandom or tempfile.SpooledTemporaryFile:
-        file.seek(0)
-        sha1 = hashlib.sha1(file.read()).hexdigest()
-        file.seek(0)
-        input_sources = b2sdk.v2.UploadSourceBytes(file.read(), content_sha1=sha1)
-        buk.upload(input_sources, save_name)
-        # while chunk := file.read(chunksize):
-        #     input_sources = b2sdk.v2.UploadSourceBytes(chunk, content_sha1=sha1)
-        #     buk.upload(input_sources, save_name)
-    else:
-        print("Unable to save " + str(file) + " of type " + str(type(file)))
-        return
 def tar(files):
     tarball = tempfile.SpooledTemporaryFile()
     with tarfile.open(fileobj=tarball, mode='w') as zipfile:
         for file in files:
             # zipfile.add(os.path.join(file['path'], file['name']))
-            zipfile.add(file['path'])
+            zipfile.add(file)
     return tarball
-def untar(tar_file, files, save_path):
+def untar(tar_file, file, save_path):
-    if type(tar_file) is io.BufferedRandom or tempfile.SpooledTemporaryFile:
+    if type(tar_file) is tempfile.SpooledTemporaryFile:
         tar_file.seek(0)
         zipfile = tarfile.open(fileobj=tar_file, mode='r')
     else:
         zipfile = tarfile.open(tar_file, 'r')
-    for file in files:
-        zipfile.extract(file['path'], path=save_path)
+    if not os.path.isdir(save_path):
+        os.mkdir(save_path)
+    for member in zipfile.getmembers():
+        if member.name == file:
+            member.name = os.path.basename(member.name)  # remove the path
+            zipfile.extract(member, path=save_path)
     zipfile.close()
 def compress(file):
     if type(file) is str:
         infile = open(file, 'rb')
-    elif type(file) is io.BufferedRandom or tempfile.SpooledTemporaryFile:
+    elif type(file) is tempfile.SpooledTemporaryFile:
         file.seek(0)
         infile = file
     compressed_file = tempfile.SpooledTemporaryFile()
     with gzip.open(compressed_file, 'wb') as zipfile:
-        while chunk := infile.read(64 * 1024):
-            zipfile.write(chunk)
+        zipfile.write(infile.read())
     return compressed_file
 def uncompress(file):
-    if type(file) is io.BufferedRandom or tempfile.SpooledTemporaryFile:
+    if type(file) is tempfile.SpooledTemporaryFile:
         file.seek(0)
     decompressed_file = tempfile.SpooledTemporaryFile()
     with gzip.open(file, 'rb') as zipfile:
-        while chunk := zipfile.read(64 * 1024):
-            decompressed_file.write(chunk)
+        decompressed_file.write(zipfile.read())
     return decompressed_file
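Taken together, the new `recover_file` and the reworked `untar` reverse the backup pipeline: fetch the encrypted object into a `SpooledTemporaryFile`, decrypt and gunzip it, then extract the requested tar member with its leading path stripped. A compressed sketch of that path, not from the diff itself: it assumes the module's `uncrypt`/`uncompress` helpers are in scope (only `uncompress` appears in this hunk), a b2sdk v2 bucket object, and hypothetical names `recover_one`/`stored_path`.

    # Sketch only: mirrors Backup.recover_file + untar for a single stored path.
    import os
    import tarfile
    import tempfile

    def recover_one(buk, key, crypt_name, stored_path, save_path="recovered"):
        dl = tempfile.SpooledTemporaryFile()
        buk.download_file_by_name(crypt_name).save(dl)   # DownloadedFile.save() fills the file object
        plain = uncompress(uncrypt(dl, key))              # helpers from this module
        plain.seek(0)
        if not os.path.isdir(save_path):
            os.mkdir(save_path)
        with tarfile.open(fileobj=plain, mode='r') as tf:
            for member in tf.getmembers():
                if member.name == stored_path:            # paths are stored POSIX-style
                    member.name = os.path.basename(member.name)
                    tf.extract(member, path=save_path)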
@@ -180,11 +172,11 @@ class DataBase:
     def __create_table(self):
         cursor = self.conn.cursor()
-        cursor.execute("""DROP TABLE IF EXISTS files""")
+        # cursor.execute("""DROP TABLE IF EXISTS files""")
+        #
-        cursor.execute("""DROP TABLE IF EXISTS crypt""")
+        # cursor.execute("""DROP TABLE IF EXISTS crypt""")
+        #
-        self.conn.commit()
+        # self.conn.commit()
         cursor.execute("""
             CREATE TABLE IF NOT EXISTS files(
@@ -210,7 +202,20 @@ class DataBase:
         self.conn.commit()

-    def get_crypt_id(self, list_file):
+    def get_crypt_file(self, list_file):
+        cursor = self.conn.cursor()
+        crypt_list = []
+        # for path in [file['path'] for file in list_file]:
+        for path in list_file:
+            path = pathlib.Path(path).as_posix()
+            cursor.execute("""SELECT crypt_id FROM files WHERE path=?""", (path,))
+            try:
+                crypt_list.append({'path': path, 'crypt': str(cursor.fetchone()['crypt_id']).zfill(5)})
+            except TypeError:
+                crypt_list.append({'path': path, 'crypt': None})
+        return crypt_list
+
+    def __get_crypt_id(self, list_file):
         cursor = self.conn.cursor()
         crypt_id_list = []
         for file in list_file:
@@ -244,7 +249,7 @@ class DataBase:
     def add(self, list_file, compress_mode=None):
         cursor = self.conn.cursor()
-        crypt_id = self.get_crypt_id(list_file)
+        crypt_id = self.__get_crypt_id(list_file)
         cursor.execute("""SELECT IFNULL(max(id) + 1, 0) as files_id FROM files""")
         file_id = cursor.fetchone()['files_id']
         proceed = False
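The lookup side of the feature is `get_crypt_file`: each requested path is normalized with `pathlib.Path(...).as_posix()` before the query, matching the form `recurse` now stores, and a missing row falls through to `'crypt': None` via the `TypeError` raised by indexing `None`. A minimal sketch of that query, not from the diff: `crypt_name_for` is a hypothetical helper, "bdd.db" comes from the test script, and the `sqlite3.Row` factory is an assumption implied by the class indexing rows with `fetchone()['crypt_id']`.

    # Sketch only: path -> crypt object-name lookup, as get_crypt_file performs it.
    import pathlib
    import sqlite3

    conn = sqlite3.connect("bdd.db")
    conn.row_factory = sqlite3.Row          # lets rows be indexed by column name

    def crypt_name_for(path):
        posix = pathlib.Path(path).as_posix()   # same normalization recurse() applies when storing
        cur = conn.cursor()
        cur.execute("SELECT crypt_id FROM files WHERE path=?", (posix,))
        row = cur.fetchone()
        return None if row is None else str(row['crypt_id']).zfill(5)

    print(crypt_name_for("test/depth1/depth1_text - Copie.txt"))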

View File

@ -72,7 +72,7 @@ def decrypt_file(key, in_file, out_file, chunksize=24 * 1024):
if type(in_file) is str: if type(in_file) is str:
input = open(in_file, 'rb') input = open(in_file, 'rb')
elif type(in_file) is io.BufferedRandom or tempfile.SpooledTemporaryFile: elif type(in_file) is tempfile.SpooledTemporaryFile:
in_file.seek(0) in_file.seek(0)
input = in_file input = in_file
else: else:
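A note on the condition simplified here (and in `get_size`, `untar`, `compress`, `uncompress` above): the old `type(in_file) is io.BufferedRandom or tempfile.SpooledTemporaryFile` was always true, because it parses as `(type(in_file) is io.BufferedRandom) or tempfile.SpooledTemporaryFile`, and a class object is truthy. The commit keeps only the type actually used; if both types were wanted, `isinstance` with a tuple would be the usual form. A small sketch with a hypothetical helper name:

    # Sketch: accept several file-like types in one check without the precedence trap.
    import io
    import tempfile

    def is_file_like(obj):
        return isinstance(obj, (io.BufferedRandom, tempfile.SpooledTemporaryFile))

    print(is_file_like(tempfile.SpooledTemporaryFile()))   # True
    print(is_file_like("some/path.txt"))                   # False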

View File

@@ -12,8 +12,8 @@ else:
     print(key)

-application_key_id = '003aa00745ec42a0000000003'
+application_key_id = '003aa00745ec42a0000000004'
-application_key = 'K003zMa5e07FheUrB38/fqKFfHSlXok'
+application_key = 'K003RNvGfy+pazc6pD97xuUzPcDEqS0'
 bucket_id = '6a1a9000075465fe7cc4021a'
 bdd = "bdd.db"
@@ -24,6 +24,8 @@ bck.save_location = "crypted"
 rootdir = "test"
 bck.recurse(rootdir)
+
+bck.recover_file(paths=["test\depth1\depth2\\1316614572_leopard.jpg","test/depth1/depth1_text - Copie.txt"], save_path="recovered")
 # base = DataBase(bdd)
 # file1 = {'name': "testname", 'path': "pathtest"}
 # file2 = {'name': "secondname", 'path': "secondpath"}
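About the mixed path styles in the `recover_file` call above: the single backslashes survive only because `\d` is not a recognized escape sequence (and the doubled `\\1` is needed because `\1` would be an octal escape); raw strings or forward slashes express the same paths unambiguously, and `get_crypt_file` normalizes all of them before hitting the database. On Windows, where these test paths come from, `pathlib.Path(...).as_posix()` does that conversion; the sketch below uses `PureWindowsPath` only so it behaves the same on any platform.

    # Sketch: all three spellings normalize to the same stored key.
    from pathlib import PureWindowsPath

    p1 = "test\depth1\depth2\\1316614572_leopard.jpg"    # as written above (newer Pythons warn on '\d')
    p2 = r"test\depth1\depth2\1316614572_leopard.jpg"     # raw string, no escape worries
    p3 = "test/depth1/depth2/1316614572_leopard.jpg"      # forward slashes
    assert PureWindowsPath(p1).as_posix() == PureWindowsPath(p2).as_posix() == p3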