Skip to content

Commit 5b58fd5

Browse files
committed
typo fixes
1 parent a376df4 commit 5b58fd5

3 files changed

Lines changed: 5 additions & 5 deletions

File tree

prime_backup/action/helpers/blob_allocator.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -307,7 +307,7 @@ def __try_get_or_create_direct_blob(self, src_path: Path, src_path_md5: str, st:
307307
can_hash_once = exist is False
308308
if can_hash_once:
309309
# it's certain that this blob is unique, but notes: the following code
310-
# cannot be interrupted (yield), or other generator could make a same blob
310+
# cannot be interrupted (yield), or another generator could make a same blob
311311
policy = _DirectBlobCreatePolicy.hash_once
312312
if policy is None:
313313
policy = _DirectBlobCreatePolicy.default
@@ -364,7 +364,7 @@ def check_changes(new_size: int, new_hash: Optional[str]):
364364
with self.__make_temp_file(src_path_md5) as temp_file_path, _FailureFileDeleter() as file_deleter:
365365
with self.__time_costs.measure_time_cost(CreateBackupTimeCostKey.kind_io_copy):
366366
cr = compressor.copy_compressed(src_path, temp_file_path, calc_hash=True, estimate_read_size=st.st_size, open_r_func=SourceFileNotFoundWrapper.open_rb)
367-
check_changes(cr.read_size, None) # the size must be unchanged, to satisfy the uniqueness
367+
check_changes(cr.read_size, None) # the size must be unchanged to satisfy the uniqueness
368368

369369
raw_size, blob_hash, stored_size = cr.read_size, cr.read_hash, cr.write_size
370370
blob_path = blob_utils.get_blob_path(blob_hash)
@@ -484,7 +484,7 @@ def __try_get_or_create_chunked_blob(self, src_path: Path, src_path_md5: str, st
484484
# The blob is specifically generated by the generator
485485
# if any yield is done, ensure to check __blob_by_hash_cache again
486486

487-
# large files that need to be chunked are not common, and they already contains quite a few chunks
487+
# large files that need to be chunked are not common, and they already contain quite a few chunks,
488488
# so it's efficient enough to directly query for chunks from DB here
489489

490490
process_start_time = time.time()

prime_backup/action/migrate_hash_method_action.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ def run(self) -> None:
7070

7171
self.logger.info('Migrating hash method from {} to {}'.format(meta.hash_method, self.new_hash_method.name))
7272

73-
# XXX: don't load all blob into memory?
73+
# XXX: don't load all blobs into memory?
7474
total_blob_count = session.get_blob_count()
7575
all_hashes = session.get_all_blob_hashes()
7676
all_hash_set = set(all_hashes)

tools/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ optional arguments:
7575
### Prepare
7676
7777
> [!IMPORTANT]
78-
> This tool assert that no hash method / compress method migration was done after those external data backups are made
78+
> This tool asserts that no hash method / compress method migration was done after those external data backups are made
7979
8080
First, confirm the time you upgraded your Prime Backup plugin to affected version (v1.9.0 ~ v1.9.2). Let's say you upgraded PrimeBackup at time T0
8181

0 commit comments

Comments (0)