Skip to content

Commit

Permalink
Resolved naming conflict of filename_without_archive_extensions
Browse files Browse the repository at this point in the history
- renamed the already existing filename_without_archive_extensions to filename_without_archive_extensions_multipart
  • Loading branch information
akahles committed Nov 19, 2024
1 parent 58a36e1 commit 169f07e
Show file tree
Hide file tree
Showing 4 changed files with 9 additions and 9 deletions.
2 changes: 1 addition & 1 deletion archiver/extract.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def extract_archive(source_path, destination_directory_path, partial_extraction_
uncompress_and_extract(archive_files, destination_directory_path, threads, partial_extraction_path=partial_extraction_path)

logging.info("Archive extracted to: " + helpers.get_absolute_path_string(destination_directory_path))
return destination_directory_path / helpers.filename_without_extensions(source_path)
return destination_directory_path / helpers.filename_without_archive_extensions(source_path)


def uncompress_and_extract(archive_file_paths, destination_directory_path, threads, partial_extraction_path=None, encrypted=False):
Expand Down
6 changes: 3 additions & 3 deletions archiver/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -355,10 +355,10 @@ def filepath_without_archive_extensions(path:Path) -> Path:
def infer_source_name(source_path: Path) -> Path:

if not source_path.is_dir():
return filepath_without_extensions(source_path)
return filepath_without_archive_extensions(source_path)
else:
all_files = [p for p in source_path.iterdir() if p.is_file()]
unique_names = list(set([filepath_without_extensions(f) for f in all_files]))
unique_names = list(set([filepath_without_archive_extensions(f) for f in all_files]))

if len(unique_names) == 0:
terminate_with_message('There are no archive files present')
Expand All @@ -368,7 +368,7 @@ def infer_source_name(source_path: Path) -> Path:
return unique_names[0]


def filename_without_archive_extensions(path):
def filename_without_archive_extensions_multipart(path):
"""Removes known archive extensions but keeps extensions like .partX"""
name = path.name

Expand Down
8 changes: 4 additions & 4 deletions archiver/integrity.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def check_integrity(source_path, deep_flag=False, threads=None, work_dir=None, a
if source_path.is_dir():
integrity_result = check_archive_list_integrity(source_path, archive_name)
else:
file_path = source_path.parent / Path(helpers.filename_without_archive_extensions(source_path))
file_path = source_path.parent / Path(helpers.filename_without_archive_extensions_multipart(source_path))
integrity_result = check_archive_part_integrity(file_path)

if not integrity_result:
Expand Down Expand Up @@ -126,7 +126,7 @@ def verify_relative_symbolic_links(archives_with_hashes):
symlink_dict = {} # all symlinks found across listing
for archive in archives_with_hashes:
part_path = archive[0]
part_listing = part_path.parent / (helpers.filename_without_archive_extensions(part_path) + LISTING_SUFFIX)
part_listing = part_path.parent / (helpers.filename_without_archive_extensions_multipart(part_path) + LISTING_SUFFIX)
entries = parse_tar_listing(part_listing)

file_set.update([str(e.path).rstrip('/') for e in entries])
Expand Down Expand Up @@ -237,7 +237,7 @@ def get_hashes_for_archive(archive_path):
hash_file_path = archive_path.parent / (archive_path.name + ".md5")
helpers.terminate_if_path_nonexistent(hash_file_path)

hash_listing_path = archive_path.parent / (helpers.filename_without_archive_extensions(archive_path) + ".md5")
hash_listing_path = archive_path.parent / (helpers.filename_without_archive_extensions_multipart(archive_path) + ".md5")
helpers.terminate_if_path_nonexistent(hash_listing_path)

return [(archive_file_path, hash_file_path, hash_listing_path)]
Expand All @@ -260,7 +260,7 @@ def get_archives_with_hashes_from_directory(source_path):
hash_path = archive.parent / (archive.name + ".md5")
helpers.terminate_if_path_nonexistent(hash_path)

hash_listing_path = Path(archive.parent) / (helpers.filename_without_archive_extensions(archive) + ".md5")
hash_listing_path = Path(archive.parent) / (helpers.filename_without_archive_extensions_multipart(archive) + ".md5")
helpers.terminate_if_path_nonexistent(hash_listing_path)

archive_with_hash_path = (archive, hash_path, hash_listing_path)
Expand Down
2 changes: 1 addition & 1 deletion archiver/listing.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def get_listing_files_for_path(path):

# If specific file is used, maybe not all results of search path will be shown (since they could be in different file)
helpers.file_is_valid_archive_or_terminate(path)
listing_path = path.parent / (helpers.filename_without_archive_extensions(path) + ".tar.lst")
listing_path = path.parent / (helpers.filename_without_archive_extensions_multipart(path) + ".tar.lst")
helpers.terminate_if_path_nonexistent(path)

return [listing_path]
Expand Down

0 comments on commit 169f07e

Please sign in to comment.