diff --git a/.gitignore b/.gitignore index 719d0ba..e9bfb23 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ core/__pycache__/ +restore-points/ *.tar.gz *.tar *.meta diff --git a/README.md b/README.md index c66de2d..e7f3ac4 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,7 @@ -# Pacback - Alpha 1.4 + +# Pacback - Alpha 1.5 +***WARNING: VERSION 1.5 BREAKS COMPATIBILITY WITH FULL RESTORE POINTS GENERATED BEFORE VERSION 1.5*** + **TLDR: This projects ultimate goal is to provide flexible and resilient downgrades while still maintaining a slim profile and fast performance.** @@ -64,7 +67,7 @@ One of the problems with rolling releases is you never know when a problem might ![Pacback Snapback](https://i.imgur.com/AX92cfz.gif) ### Simple System Maintenance for Developers -If you are like me you love to install and test the latest projects the community is working on. The downside of doing this is the slow build-up of packages as you try to remember why that you ever installed a set of packages. To avoid this you can use pacback to create a restore point then install a bunch of experimental packages you only plan on keeping for a few days. After you're done, simply roll back to the Restore Point and all the packages you installed will be removed. +If you are like me, you love to install and test the latest projects the community is working on. The downside of doing this is the slow build-up of packages as you try to remember why you ever installed a set of packages. To avoid this, you can use pacback to create a restore point, then install a bunch of experimental packages you only plan on keeping for a few days. After you're done, simply roll back to the Restore Point and all the packages you installed will be removed. In the following example, I will install Haskell, which is a dependency nightmare. After installing it, we will show how to quickly uninstall all your changes. 1. First, we create a restore point with: `pacback -c 3` @@ -74,7 +77,7 @@ In the following example, I will install Haskell which is a dependency nightmare ![Pacback Haskell](https://imgur.com/PzUznWZ.gif) ### Backup Version Sensitive Application Data -In some cases, config files many need to be modified when updating packages. In other cases, you may want to backup application data before deploying an upgrade incase of error or corruption. Pacback makes it extremely simple to store files like this and will automatically compare files you have stored against your current file system. Once checksumming is complete you can selectively overwrite each subsection of file type: Changed, Added, and Removed. +In some cases, config files may need to be modified when updating packages. In other cases, you may want to backup application data before deploying an upgrade in case of error or corruption. Pacback makes it extremely simple to store files like this and will automatically compare files you have stored against your current file system. Once checksumming is complete, you can selectively overwrite each subsection of file type: Changed, Added, and Removed. In this example we pack up an Apache websever and Postgresql database. 1. `pacback -c 1 -f -d /var/www /etc/httpd /var/lib/postgres/data` @@ -84,15 +87,15 @@ In this example we pack up an Apache websever and Postgresql database. ![Pacback Saving App Data](https://imgur.com/Ag0NROG.gif) ### Rollback a List of Packages -Most issues with an update stem from a single package or a set of related package. Pacback allows you to selectively rollback a list of packages using `pacback -pkg package_1 package_2 package_3`. Packback searches your file system looking for all versions assoicated with each package package name. When searching for a package, be as spesific as possible. Since generic names like 'linux' or 'gcc' apper in many package names, the search may be cluttered with unrelated packages. +Most issues with an update stem from a single package or a set of related packages. Pacback allows you to selectively roll back a list of packages using `pacback -pkg package_1 package_2 package_3`. Pacback searches your file system looking for all versions associated with each package name. When searching for a package, be as specific as possible. Since generic names like 'linux' or 'gcc' appear in many package names, the search may be cluttered with unrelated packages. -In this example we selectively rollback 2 packages. +In this example, we selectively roll back 2 packages. 1. `pacback -pkg typescript electron4` ![Pacback Rolling Back a List of Packages](https://imgur.com/Rhy6iDn.gif)
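The search behind `pacback -pkg` lives in `fetch_paccache()` and `fetch_pacman_pkgs()` from the `python_scripts` submodule, which this diff only bumps, so the real matching logic is not shown here. Purely as an illustrative sketch (the cache path and the file-name pattern below are assumptions, not Pacback's actual implementation), finding every cached version of a package name could look something like this:

```python
### Illustrative sketch only: approximates the version search described above.
### The cache path and matching rule are assumptions; Pacback's real search is
### in the python_scripts submodule and also scans other cache locations.
import glob
import os
import re

def find_pkg_versions(name, cache='/var/cache/pacman/pkg'):
    ### Arch package files are named 'name-version-release-arch.pkg.tar.*'
    pattern = re.compile(re.escape(name) + r'-[^-]+-[^-]+-[^-]+\.pkg\.tar\.(xz|zst|gz)')
    return sorted(p for p in glob.glob(cache + '/*.pkg.tar.*')
                  if pattern.fullmatch(os.path.basename(p)))

for path in find_pkg_versions('typescript'):
    print(path)
```

Matching the full `name-version-release-arch` shape rather than a loose substring is why the README's advice to be specific matters: a bare 'gcc' search would otherwise also pull in packages like `aarch64-linux-gnu-gcc`.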
### Rolling Back to an Archive Date -Another popular way to rollback package versions is to use the Arch Linux Archives to pull packages with directly with pacman. Pacback automates this entire process with the `pacback -rb` command. To rollback to a specific date, give `-rb` a date in YYYY/MM/DD format and Pacback will automatically save your mirrorlist, point a new mirrorlist to an archive URL, then run a full system downgrade. When every you are ready to jump back to the head, run `pacback -u` and Pacback with automatically retore your old mirrorlist. In the event that you destroy this backup, Pacback can automatically fetch a new HTTP US mirrorlist for the system. +Another popular way to rollback package versions is to use the Arch Linux Archives to pull packages directly with pacman. Pacback automates this entire process with the `pacback -rb` command. To roll back to a specific date, give `-rb` a date in YYYY/MM/DD format and Pacback will automatically save your mirrorlist, point a new mirrorlist to an archive URL, then run a full system downgrade. Whenever you are ready to jump back to the head, run `pacback -u` and Pacback will automatically restore your old mirrorlist. In the event that you destroy this backup, Pacback can automatically fetch a new HTTP US mirrorlist for the system. 1. `pacback -rb 2019/10/18` @@ -113,24 +116,27 @@ By default, Pacback creates a Light Restore Point which consists of only a .meta **Light Restore Point Disadvantages:** - Light RP's Will Fail to Provide Real Value If Old Package Versions Are Removed (aka. paccahe -r) - - Light RP's Are Not Portable ### Full Restore Points -When a Full Restore Point is used, Pacback searches through your file system looking for each package version installed. Pacback then creates a Restore Point tar which contains all the compiled packages installed on the system at the time of its creation, along with any additional files the user specifies. Full Restore Points also generate a metadata file but even if you lose or delete this file, you will still be able to run a full system recovery and pacback will simply skip its more advanced features. +When a Full Restore Point is used, Pacback searches through your file system looking for each package version installed. Pacback then creates a Restore Point folder which contains a hardlink to each compiled package installed on the system at the time of its creation, along with any additional files the user specifies. Since each package is hardlinked to an inode, a package can be referenced an infinite number of times without duplication. A package will not be fully deleted from the system until all links to the inode are removed. This also gives Light Restore Points additional resilience, as they can search Full Restore Points for the packages they need.
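The inode behaviour described above is ordinary filesystem semantics rather than anything specific to Pacback; in this patch Pacback simply shells out to `sudo ln` while filling the `pac_cache` folder. As a small illustrative sketch (the file names below are made up for the example), the effect of a hardlink looks like this:

```python
### Illustrative sketch of the hardlink/inode behaviour described above, using a
### throwaway temp directory. Pacback itself runs 'sudo ln' on real cached packages.
import os
import tempfile

work = tempfile.mkdtemp()
src = os.path.join(work, 'demo-1.0.0-1-any.pkg.tar.xz')      # stand-in for a cached package
dst = os.path.join(work, 'pac_cache-' + os.path.basename(src))

with open(src, 'wb') as f:
    f.write(b'fake package payload')

os.link(src, dst)              # second directory entry, same inode: no data is duplicated
print(os.stat(dst).st_nlink)   # link count is now 2

os.remove(src)                 # e.g. 'paccache -r' pruning the pacman cache...
with open(dst, 'rb') as f:
    print(f.read())            # ...but the data survives until the last link is removed
```

This sharing of the same underlying data blocks is also why the disadvantages below can list inode corruption as a risk that Full RP's do not protect against.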
+ -When you fallback on a Full Restore Point, Pacback will unpack the tar and install all the packages contained within. It will also give you the ability to remove any new packages added since its creation. Once this is complete, if you have packed any config files into the restore point, Pacback with checksum each file and compare it to your file system. Pacback will then let you selectively overwrite each subsection of file type: Changed, Added, and Removed. +![https://i.imgur.com/eikZF2g.jpg](https://i.imgur.com/eikZF2g.jpg) + +Full Restore Points also generate a metadata file, but even if you lose or delete this file, you will still be able to run a full system rollback and pacback will simply skip its more advanced features. When you fall back on a Full Restore Point, Pacback runs its normal package checks, giving you the ability to roll back packages and remove any new packages added since its creation. Once this is complete, if you have any config files saved, Pacback will checksum each file and compare it to your file system. Pacback will then let you selectively overwrite each subsection of file type: Changed, Added, and Removed. **Full Restore Point Advantages:** - Full RP's Are 100% Self Contained - Adding Custom Directories Allows for the Rollback of Config Files Associated with New Versions - - Full RP's are Portable and Can Be Used to Deploy Staged Updates to Servers - - Full RP's Can Backup Entire Systems and Applications + - Full RP's Ensure That Packages Are Not Prematurely Removed + - Full RP's Provide Light Restore Points with Additional Resilience **Full Restore Point Disadvantages:** - - Full RP's Are Large - - Full RP's Create Duplicate Copies of Complied Packages Already Present in the Cache or in Other RP's - - Full RP's are IO Bound During Compression and Decompression +- Hardlinking Packages Can Take A Long Time +- The Addition of Thousands of Duplicate File Names Requires Pacback to Use Costly Duplication Filters +- Full RP's Don't Protect Against Inode Corruption +------------------ ### Metadata Files Restore Point metadata files contain information in a human readable format about packages installed at the time of its creation along with other information. This information is used by Pacback to restore older versions of packages and provide general information about the Restore Point. Each meta data file will look something like this: @@ -147,6 +153,24 @@ aarch64-linux-gnu-binutils 2.33.1-1 aarch64-linux-gnu-gcc 9.2.0-1 aarch64-linux-gnu-glibc 2.30-1 aarch64-linux-gnu-linux-api-headers 4.20-1 -...... -.... -.. + +------------------ + +## Feature Path, Known Bugs, Issues, and Limitations +This list is likely to have many changes and edits as new versions are released. Please read this carefully when updating versions or deploying pacback to new systems. + +### Issues: +- **Pacback Skips Checksumming Files over 1GB.** - This is done for a number of reasons, the first of which is that Python is painfully slow at reading large files. In my testing, checksumming took 30x-50x longer compared to a terminal equivalent. The second reason large files are skipped is that it is outside of Pacback's use-case. Packaging directories is intended for saving the state of potentially thousands of small configuration files, not large ISOs or archives.
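The checksumming itself is implemented in the `python_scripts` submodule, which this diff only bumps to a new commit, so the exact routine is not shown here. Purely as an illustrative sketch of the behaviour described in the issue above (the hash algorithm, chunk size, and output format are assumptions, not Pacback's actual choices), a size-capped file checksum might look like this:

```python
### Illustrative sketch of the size-capped checksumming described above.
### The algorithm and chunk size are assumptions; the real routine lives in python_scripts.
import hashlib
import os

GIB = 1073741824  # the same 1GB threshold this patch uses when deciding whether to compress the tar

def checksum_file(path, chunk=4 * 1024 * 1024):
    if os.path.getsize(path) > GIB:
        return None  # files over 1GB are skipped, as the issue above explains
    csum = hashlib.sha256()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(chunk), b''):
            csum.update(block)
    return path + ' : ' + csum.hexdigest()

print(checksum_file(__file__))
```

The `' : '` separator here is only meant to mirror the `f.split(' : ')[0]` parsing that appears later in the rollback code; whether the real checksum output is formatted this way is an assumption.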
+ +- **Pacback Creates Missing Directories as Root.** - Currently files are copied out of the restore point with the exact same permissions they went in with. The issue here is the creation of missing directories. When Pacback creates these directories, the original permissions are not copied. + +### Feature Path: +- [ ] Version Checking +- [ ] Pacman Hook +- [ ] Improved Searches for Individual Packages +- [ ] Fix Checksumming +- [ ] Fix Directory Creation +- [ ] Better Color Output +- [ ] Arch Archive Support for Singular Package Versions +- [ ] Faster Package Searches +- [ ] Improve Internal Documentation diff --git a/core/pacback.py b/core/pacback.py index 8f5b655..0a89486 100755 --- a/core/pacback.py +++ b/core/pacback.py @@ -1,21 +1,13 @@ #! /usr/bin/env python3 #### A utility for marking and restoring stable arch packages -version = '1.4.1' +version = '1.5.0' from python_scripts import * import tqdm, argparse -def prError(text): print("\u001b[31;1m{}\033[00m" .format(text)) -def prSuccess(text): print("\u001b[32;1m{}\033[00m" .format(text)) -def prWorking(text): print("\033[33m{}\033[00m" .format(text)) -def prWarning(text): print("\033[93m{}\033[00m" .format(text)) -def prChanged(text): print("\u001b[35m{}\033[00m" .format(text)) -def prRemoved(text): print("\033[31m{}\033[00m" .format(text)) -def prAdded(text): print("\033[94m{}\033[00m" .format(text)) - def trim_pkg_list(pkg_list): - pkg_split = {pkg.split('/')[-1] for pkg in pkg_list} ### Remove Dir Path + pkg_split = {pkg.split('/')[-1] for pkg in pkg_list} ### Remove Dir Path pkg_split = {'-'.join(pkg.split('-')[:-1]) for pkg in pkg_split} ### Remove .pkg.tar.xz From Name - return pkg_list + return pkg_split #<#><#><#><#><#><#>#<#>#<# #+# Create Restore Point @@ -26,47 +18,49 @@ def create_restore_point(rp_num, rp_full, dir_list): if not os.path.exists(base_dir + '/restore-points'): mkdir(base_dir + '/restore-points', sudo=True) open_permissions(base_dir + '/restore-points') - - ### Check for Existing Restore Points - rp_path = base_dir + '/restore-points/rp' + str(rp_num).zfill(2) - if os.path.exists(rp_path + '.tar') or os.path.exists(rp_path + '.tar.gz') or os.path.exists(rp_path + '.meta'): - if not int(rp_num) == 0: - if args.no_confirm == False: - prWarning('Restore Point #' + str(rp_num).zfill(2) + ' Already Exists!') - if yn_frame('Do You Want to Overwrite It?') == False: - return prError('Aborting RP Creation!') - rm_file(rp_path + '.tar', sudo=True) - rm_file(rp_path + '.tar.gz', sudo=True) - rm_file(rp_path + '.meta', sudo=True) - + ### Set Base Vars + rp_path = base_dir + '/restore-points/rp' + str(rp_num).zfill(2) + rp_tar = rp_path + '/' + str(rp_num).zfill(2) + '_dirs.tar' + rp_meta = rp_path + '.meta' rp_files = set() found_pkgs = set() pac_size = 0 dir_size = 0 + ### Check for Existing Restore Points + if os.path.exists(rp_path) or os.path.exists(rp_meta): + if not int(rp_num) == 0: ### Check If -Syu + if args.no_confirm == False: + prWarning('Restore Point #' + str(rp_num).zfill(2) + ' Already Exists!') + if yn_frame('Do You Want to Overwrite It?') == False: + return prError('Aborting RP Creation!') + rm_file(rp_meta, sudo=True) + rm_dir(rp_path, sudo=True) + if rp_full == True:
################################### ### Find Pkgs for Restore Point ### ################################### + pac_cache = rp_path + '/pac_cache' print('Building Full Restore Point...') prWorking('Retrieving Current Packages...') current_pkgs = pacman_Q(replace_spaces=True) ### Search File System for Pkgs prWorking('Bulk Scanning for ' + str(len(current_pkgs)) + ' Packages...') - found_pkgs = find_pacman_pkgs(current_pkgs, find_paccache()) - + found_pkgs = fetch_pacman_pkgs(current_pkgs, fetch_paccache()) + ### Get Size of Pkgs Found for p in found_pkgs: try: pac_size += os.path.getsize(p) except: pass ### Ask About Missing Pkgs - if not found_pkgs == current_pkgs: + if not len(found_pkgs) == len(current_pkgs): if args.no_confirm == False: pkg_split = trim_pkg_list(found_pkgs) - prWarning('The Following Packages Where NOT Found!') + prError('The Following Packages Were NOT Found!') for pkg in set(current_pkgs - pkg_split): prWarning(pkg + ' Was NOT Found!') if yn_frame('Do You Still Want to Continue?') == True: @@ -74,42 +68,39 @@ def create_restore_point(rp_num, rp_full, dir_list): else: return prError('Aborting RP Creation!') - ### Add Path Within Tar - found_pkgs = {f + '<>/pac_cache/' + os.path.basename(f) for f in found_pkgs} + ############################### + ### HardLink Packages to RP ### + ############################### + mkdir(rp_path, sudo=False) + mkdir(pac_cache, sudo=False) + for pkg in tqdm.tqdm(found_pkgs, desc='Hardlinking Packages to Pacback RP'): + os.system('sudo ln ' + pkg + ' ' + pac_cache + '/' + pkg.split('/')[-1]) ################################ ### Find Custom Files for RP ### ################################ if len(dir_list) > 0: + ### Find and Get Size of Custom Files for d in dir_list: - for f in search_fs(d): + for f in search_fs(d, 'set'): try: dir_size += os.path.getsize(f) except: pass + rp_files.add(f) + + ### Pack Custom Folders Into a Tar + with tarfile.open(rp_tar, 'w') as tar: + for f in tqdm.tqdm(rp_files, desc='Adding Dir\'s to Tar'): + tar.add(f) + + ### Compress Custom Files If Added Files Are Larger Than 1GB + if dir_size > 1073741824: + prWorking('Compressing Restore Point Files...') + ### Check to See if pigz is Installed + if any(re.findall('pigz', line.lower()) for line in current_pkgs): + os.system('pigz ' + rp_tar + ' -f') + else: + gz_c(rp_tar, rm=True) - ### Recursivly Add Files From Each Base Dir - for path in dir_list: - l = search_fs(path, 'set') - for x in l: - rp_files.add(x +'<>'+ x) - - ########################### - ### Build Restore Point ### - ########################### - with tarfile.open(rp_path + '.tar', 'w') as tar: - tar_files = found_pkgs.union(rp_files) ### Combine Packages and Dirs - for f in tqdm.tqdm(tar_files, desc='Building Restore Point'): - s = f.split('<>') - tar.add(s[0], s[1]) ### This Parses List of Files in the Format '/dir/in/system/<>/dir/in/tar' - - ### Compress Restore Point if Files Added Larger Than 1GB - if dir_size > 1073741824: - prWorking('Compressing Restore Point...') - ### Check to See if pigz is Installed - if any(re.findall('pigz', line.lower()) for line in current_pkgs): - os.system('pigz ' + rp_path + '.tar -f') - else: - gz_c(rp_path + '.tar', rm=True) - elif rp_full == False: print('Building Light Restore Point...') @@ -139,7 +130,7 @@ def create_restore_point(rp_num, rp_full, dir_list): meta_list.append(pkg) ### Export Final Meta File - export_list(rp_path + '.meta', meta_list) + export_list(rp_meta, meta_list) prSuccess('Restore Point #' + str(rp_num).zfill(2) + ' Successfully Created!') @@
-149,21 +140,27 @@ def create_restore_point(rp_num, rp_full, dir_list): def rollback_to_rp(rp_num): rp_path = base_dir + '/restore-points/rp' + str(rp_num).zfill(2) + rp_tar = rp_path + '/' + str(rp_num).zfill(2) + '_dirs.tar' + rp_meta = rp_path + '.meta' current_pkgs = pacman_Q() + + ### Check if Old Version + if os.path.exists(rp_path + '.tar') or os.path.exists(rp_path + '.tar.gz'): + return prError('Full Restore Points Generated Before Version 1.5.0 Are No Longer Compatible With Newer Versions of Pacback!') ### Set Full RP Status - if os.path.exists(rp_path + '.tar') or os.path.exists(rp_path + '.tar.gz'): + if os.path.exists(rp_path): full_rp = True else: full_rp = False - ### Set Meta Status and Read in Present Then Set Vars - if os.path.exists(rp_path + '.meta'): + ### Set Meta Status, Read Meta, Diff Packages, Set Vars + if os.path.exists(rp_meta): meta_exists = True - meta = read_list(rp_path + '.meta') + meta = read_list(rp_meta) meta_dirs = read_between('========= Dir List =========','======= Pacman List ========', meta)[:-1] meta_old_pkgs = read_between('======= Pacman List ========','', meta) - + ### Checking for New and Changed Packages changed_pkgs = set(set(meta_old_pkgs) - current_pkgs) meta_old_pkg_strp = {pkg.split(' ')[0] for pkg in meta_old_pkgs} ### Strip Version @@ -183,26 +180,18 @@ def rollback_to_rp(rp_num): ########################## ### Full Restore Point ### ########################## - ### Decompress if .gz - if os.path.exists(rp_path + '.tar.gz'): - prWorking('Decompressing Restore Point....') - if any(re.findall('pigz', line.lower()) for line in packages): - os.system('pigz -d ' + rp_path + '.tar.gz -f') ### Decompress With pigz - else: - gz_d(rp_path + '.tar.gz') ### Decompress with Python - - ### Untar RP - if os.path.exists(rp_path + '.tar'): - if os.path.exists(rp_path): ### Clean RP is Already Unpacked - shutil.rmtree(rp_path) - prWorking('Unpacking Files from Restore Point Tar....') - untar_dir(rp_path + '.tar') ### Untar RP with Python + rp_cache = rp_path + '/pac_cache' - ### Install All Restore Point Packages - os.system('sudo pacman --needed -U ' + str(rp_path).zfill(2) + '/pac_cache/*') - shutil.rmtree(rp_path + '/pac_cache') + if meta_exists == True: + ### Pass If No Packages Have Changed + if len(changed_pkgs) == 0: + prSuccess('No Packages Have Been Upgraded!') + else: + found_pkgs = fetch_pacman_pkgs({s.strip().replace(' ', '-') for s in changed_pkgs}, search_fs(rp_cache, typ='set')) + os.system('sudo pacman -U ' + ' '.join(found_pkgs)) - if meta_exists == False: + elif meta_exists == False: + os.system('sudo pacman --needed -U ' + rp_cache + '/*') prError('Restore Point #' + str(rp_num).zfill(2) + ' Meta Data Was NOT FOUND!') return prError('Skipping Advanced Features!') @@ -211,30 +200,37 @@ def rollback_to_rp(rp_num): ### Light Restore Point ### ########################### prWorking('Bulk Scanning for ' + str(len(meta_old_pkgs)) + ' Packages...') - found_pkgs = find_pacman_pkgs({s.strip().replace(' ', '-') for s in changed_pkgs}, find_paccache()) + found_pkgs = fetch_pacman_pkgs({s.strip().replace(' ', '-') for s in changed_pkgs}, fetch_paccache()) - ### Pass If No Packages Have Changed + ### Pass If No Packages Have Changed if len(changed_pkgs) == 0: prSuccess('No Packages Have Been Upgraded!') - + ### Pass Comparison if All Packages Found elif len(found_pkgs) == len(changed_pkgs): prSuccess('All Packages Found In Your Local File System!') os.system('sudo pacman -U ' + ' '.join(found_pkgs)) - + ### Branch if Packages are Missing elif 
len(found_pkgs) < len(changed_pkgs): - pkg_split = trim_pkg_list(found_pkgs) - missing_pkg = set({s.strip().replace(' ', '-') for s in changed_pkgs} - pkg_split) - - ### Show Missing Pkgs - prWarning('Couldn\'t Find The Following Package Versions:') - for pkg in missing_pkg: - prError(pkg) - if yn_frame('Do You Want To Continue Anyway?') == True: + prWarning('Packages Are Missing! Extending Package Search...') + found_pkgs = fetch_pacman_pkgs({s.strip().replace(' ', '-') for s in changed_pkgs}, fetch_paccache(base_dir + '/restore-points')) + if len(found_pkgs) == len(changed_pkgs): + prSuccess('All Packages Found In Your Local File System!') os.system('sudo pacman -U ' + ' '.join(found_pkgs)) + else: - return prError('Aborting Rollback!') + pkg_split = trim_pkg_list(found_pkgs) + missing_pkg = set({s.strip().replace(' ', '-') for s in changed_pkgs} - pkg_split) + + ### Show Missing Pkgs + prWarning('Couldn\'t Find The Following Package Versions:') + for pkg in missing_pkg: + prError(pkg) + if yn_frame('Do You Want To Continue Anyway?') == True: + os.system('sudo pacman -U ' + ' '.join(found_pkgs)) + else: + return prError('Aborting Rollback!') ### Uninstall New Packages? Executes When Meta is True and When Packages Have Been Added if len(added_pkgs) > 0: @@ -248,11 +244,26 @@ def rollback_to_rp(rp_num): ### Diff Restore Files ### ########################## if not len(meta_dirs) > 0: - if full_rp == True: - shutil.rmtree(rp_path) return prSuccess('Rollback to Restore Point #' + str(rp_num).zfill(2) + ' Complete!') else: + custom_dirs = rp_tar[:-4] + ### Decompress if .gz + if os.path.exists(rp_tar + '.gz'): + prWorking('Decompressing Restore Point....') + if any(re.findall('pigz', line.lower()) for line in current_pkgs): + os.system('pigz -d ' + rp_tar + '.gz -f') ### Decompress With pigz + else: + gz_d(rp_tar + '.gz') ### Decompress with Python + + ### Remove if Custom Dirs Unpacked + if os.path.exists(custom_dirs): + shutil.rmtree(custom_dirs) + + ### Untar RP + prWorking('Unpacking Files from Restore Point Tar....') + untar_dir(rp_tar) + diff_yn = yn_frame('Do You Want to Checksum Diff Restore Point Files Against Your Current File System?') if diff_yn == False: print('Skipping Diff!') @@ -260,8 +271,8 @@ def rollback_to_rp(rp_num): elif diff_yn == True: import multiprocessing as mp - rp_fs = search_fs(rp_path) - rp_fs_trim = set(path[len(rp_path):] for path in search_fs(rp_path)) + rp_fs = search_fs(custom_dirs) + rp_fs_trim = set(path[len(custom_dirs):] for path in search_fs(custom_dirs)) ### Checksum Restore Point Files with a MultiProcessing Pool with mp.Pool(os.cpu_count()) as pool: @@ -271,7 +282,7 @@ def rollback_to_rp(rp_num): total=len(rp_fs_trim), desc='Checksumming Source Files')) ### Compare Checksums For Files That Exist - rp_csum_trim = set(path[len(rp_path):] for path in rp_checksum) + rp_csum_trim = set(path[len(custom_dirs):] for path in rp_checksum) rp_diff = sf_checksum.difference(rp_csum_trim) ### Filter Removed and Changed Files @@ -291,17 +302,9 @@ def rollback_to_rp(rp_num): diff_new = src_fs.difference(rp_fs_trim) ### Print Changed Files For User - if len(diff_changed) + len(diff_new) + len(diff_removed) > 0: - print('The Following Files Have Changed:') - for f in diff_changed: - prChanged(f) - for f in diff_removed: - prRemoved(f) - for f in diff_new: - prAdded(f + ' : NEW FILE!') - else: - shutil.rmtree(rp_path) - return print('No Files Have Been Changed!') + if len(diff_changed) + len(diff_new) + len(diff_removed) == 0: + shutil.rmtree(custom_dirs) + return
prSuccess('No Files Have Been Changed!') ####################### ### Overwrite Files ### @@ -310,38 +313,50 @@ def rollback_to_rp(rp_num): prWarning('YOU HAVE NOT CHECKSUMED THE RESTORE POINT! OVERWRITING ALL FILES CAN BE EXTREAMLY DANGOURS!') ow = yn_frame('Do You Still Want to Continue and Restore ALL Files In the Restore Point?') if ow == False: - return print('Skipping Automatic File Restore! Restore Point Files Are Unpacked in ' + rp_path) - - elif ow == True: + return print('Skipping Automatic File Restore! Restore Point Files Are Unpacked in ' + custom_dirs) + + elif ow == True: print('Starting Full File Restore! Please Be Patient As All Files are Overwritten...') - rp_fs = search_fs(rp_path) + rp_fs = search_fs(custom_dirs) for f in rp_fs: - os.system('sudo mkdir -p ' + escape_bash('/'.join(f.split('/')[:-1])) + ' && sudo cp -af ' + escape_bash(f) + ' ' + escape_bash(f[len(rp_path):])) - + prWorking('Please Be Patient. This May Take a While...') + os.system('sudo mkdir -p ' + escape_bash('/'.join(f.split('/')[:-1])) + ' && sudo cp -af ' + escape_bash(f) + ' ' + escape_bash(f[len(custom_dirs):])) + elif diff_yn == True: ow = yn_frame('Do You Want to Automaticly Restore Changed and Missing Files?') if ow == False: - return print('Skipping Automatic Restore! Restore Point Files Are Unpacked in ' + rp_path) - + return print('Skipping Automatic Restore! Restore Point Files Are Unpacked in ' + custom_dirs) + if ow == True: if len(diff_changed) > 0: + prWarning('The Following Files Have Changed:') + for f in diff_changed: + prChanged(f) if yn_frame('Do You Want to Overwrite Files That Have Been CHANGED?') == True: + prWorking('Please Be Patient. This May Take a While...') for f in diff_changed: fs = (f.split(' : ')[0]) - os.system('sudo cp -af ' + escape_bash(rp_path + fs) + ' ' + escape_bash(fs)) - + os.system('sudo cp -af ' + escape_bash(custom_dirs + fs) + ' ' + escape_bash(fs)) + if len(diff_removed) > 0: + prWarning('The Following Files Have Removed:') + for f in diff_removed: + prRemoved(f) if yn_frame('Do You Want to Add Files That Have Been REMOVED?') == True: + prWorking('Please Be Patient. This May Take a While...') for f in diff_removed: fs = (f.split(' : ')[0]) - os.system('sudo mkdir -p ' + escape_bash('/'.join(fs.split('/')[:-1])) + ' && sudo cp -af ' + escape_bash(rp_path + fs) + ' ' + escape_bash(fs)) - + os.system('sudo mkdir -p ' + escape_bash('/'.join(fs.split('/')[:-1])) + ' && sudo cp -af ' + escape_bash(custom_dirs + fs) + ' ' + escape_bash(fs)) + if len(diff_new) > 0: + for f in diff_new: + prAdded(f + ' : NEW FILE!') if yn_frame('Do You Want to Remove Files That Have Beend ADDED?') == True: + prWorking('Please Be Patient. 
This May Take a While...') for f in diff_new: fs = (f.split(' : ')[0]) os.system('sudo rm ' + fs) - shutil.rmtree(rp_path) + shutil.rmtree(custom_dirs) prSuccess('File Restore Complete!') @@ -358,7 +373,7 @@ def rollback_to_date(date): if len(read_list('/etc/pacman.d/mirrorlist')) > 1: os.system('sudo cp /etc/pacman.d/mirrorlist /etc/pacman.d/mirrorlist.pacback') os.system("echo 'Server=https://archive.archlinux.org/repos/" + date + "/$repo/os/$arch' | sudo tee /etc/pacman.d/mirrorlist >/dev/null") - + ### Run Pacman Update os.system('sudo pacman -Syyuu') @@ -387,14 +402,16 @@ def unlock_rollback(): if update == False: return print('Skipping Update!') + #<#><#><#><#><#><#>#<#>#<# -#+# Rollback Packages +#+# Rollback Packages #<#><#><#><#><#><#>#<#>#<# + def rollback_packages(pkg_list): prWorking('Searching File System for Packages...') - fs_list = find_paccache() + fs_list = fetch_paccache(base_dir + '/restore-points') for pkg in pkg_list: - found = find_pacman_pkgs([pkg], fs_list) + found = fetch_pacman_pkgs([pkg], fs_list) if len(found) > 0: found_pkgs = trim_pkg_list(found_pkgs) prSuccess('Pacback Found the Following Package Versions for ' + pkg + ':') @@ -405,7 +422,7 @@ def rollback_packages(pkg_list): os.system('sudo pacman -U ' + path) else: prError('No Packages Found Under the Name: ' + pkg) - + #<#><#><#><#><#><#>#<#>#<# #+# CLI Args @@ -438,10 +455,10 @@ def rollback_packages(pkg_list): meta = read_between('Pacback RP', 'Pacman List', meta, re_flag=True) for s in meta[:-1]: print(s) - - elif os.path.exists(rp + '.tar') or os.path.exists(rp + '.tar.gz'): + + elif os.path.exists(rp): prError('Meta is Missing For This Restore Point!') - + else: prError('No Restore Point #' + str(args.info).zfill(2) + ' Was NOT Found!') else: diff --git a/core/python_scripts b/core/python_scripts index e8711dc..ee15fa1 160000 --- a/core/python_scripts +++ b/core/python_scripts @@ -1 +1 @@ -Subproject commit e8711dc90d167aca404aefc85010c5f14d5c1015 +Subproject commit ee15fa1fe18d0c8e5845dc97eec69fc0c62e5369