From 637bd4df3a8e98da353ce2a81b11d685f668e40f Mon Sep 17 00:00:00 2001
From: Gang Li
Date: Tue, 8 Mar 2022 15:44:15 +0800
Subject: [PATCH] Fix: Make prodinfo check failure not an error

---
 charon/pkgs/npm.py     | 21 +++++++++++++--------
 charon/storage.py      | 22 +++++++++-------------
 tests/test_s3client.py | 10 +++++++---
 3 files changed, 29 insertions(+), 24 deletions(-)

diff --git a/charon/pkgs/npm.py b/charon/pkgs/npm.py
index ceb50db9..7a4bc663 100644
--- a/charon/pkgs/npm.py
+++ b/charon/pkgs/npm.py
@@ -113,12 +113,12 @@ def handle_npm_uploading(
     succeeded = True
     for target in targets:
         manifest_folder = target[0]
-        logger.info("Start uploading manifest to s3 bucket %s", manifest_bucket_name)
         if not manifest_bucket_name:
             logger.warning(
                 'Warning: No manifest bucket is provided, will ignore the process of manifest '
                 'uploading\n')
         else:
+            logger.info("Start uploading manifest to s3 bucket %s", manifest_bucket_name)
             manifest_name, manifest_full_path = write_manifest(valid_paths, target_dir, product)
             client.upload_manifest(
                 manifest_name, manifest_full_path,
@@ -216,13 +216,18 @@ def handle_npm_del(
         )
         logger.info("Files deletion done\n")
 
-        manifest_folder = target[0]
-        logger.info(
-            "Start deleting manifest from s3 bucket %s",
-            manifest_bucket_name
-        )
-        client.delete_manifest(product, manifest_folder, manifest_bucket_name)
-        logger.info("Manifest deletion is done\n")
+        if manifest_bucket_name:
+            manifest_folder = target[0]
+            logger.info(
+                "Start deleting manifest from s3 bucket %s in folder %s",
+                manifest_bucket_name, manifest_folder
+            )
+            client.delete_manifest(product, manifest_folder, manifest_bucket_name)
+            logger.info("Manifest deletion is done\n")
+        else:
+            logger.warning(
+                'Warning: No manifest bucket is provided, will ignore the process of manifest '
+                'deletion\n')
 
         logger.info(
             "Start generating package.json for package: %s in bucket %s",
diff --git a/charon/storage.py b/charon/storage.py
index 0cefed71..bcb2cc87 100644
--- a/charon/storage.py
+++ b/charon/storage.py
@@ -203,12 +203,10 @@ async def path_upload_handler(
                         failed.append(full_file_path)
                         return
                 else:
-                    succeeded = await handle_existed(
+                    await handle_existed(
                         full_file_path, sha1, main_path_key,
-                        main_bucket_name, main_file_object, failed
+                        main_bucket_name, main_file_object
                     )
-                    if not succeeded:
-                        return
 
             # do copy:
             for target_ in extra_prefixed_buckets:
@@ -241,12 +239,12 @@
                 else:
                     await handle_existed(
                         full_file_path, sha1, extra_path_key,
-                        extra_bucket_name, file_object, failed
+                        extra_bucket_name, file_object
                     )
 
         async def handle_existed(
                 file_path, file_sha1, path_key,
-                bucket_name, file_object, failed_paths
+                bucket_name, file_object
         ) -> bool:
             logger.debug(
                 "File %s already exists in bucket %s, check if need to update product.",
@@ -257,10 +255,9 @@
                 f_meta[CHECKSUM_META_KEY] if CHECKSUM_META_KEY in f_meta else ""
            )
             if checksum != "" and checksum.strip() != file_sha1:
-                logger.warning('Error: checksum check failed. The file %s is '
+                logger.warning('Warning: checksum check failed. The file %s is '
                                'different from the one in S3 bucket %s. Product: %s',
                               path_key, bucket_name, product)
-                failed_paths.append(file_path)
                 return False
             (prods, no_error) = await self.__run_async(
                 self.__get_prod_info,
@@ -277,7 +274,6 @@
                 path_key, bucket_name, prods
             )
             if not result:
-                failed_paths.append(file_path)
                 return False
             return True
 
@@ -703,8 +699,8 @@ async def __update_prod_info(
                 logger.debug("Updated product infomation for file %s", file)
                 return True
             except (ClientError, HTTPClientError) as e:
-                logger.error("ERROR: Can not update product info for file %s "
-                             "due to error: %s", file, e)
+                logger.warning("WARNING: Cannot update product info for file %s "
+                               "due to error: %s", file, e)
                 return False
         else:
             logger.debug("Removing product infomation file for file %s "
@@ -724,8 +720,8 @@
                 logger.debug("Removed product infomation file for file %s", file)
                 return True
             except (ClientError, HTTPClientError) as e:
-                logger.error("ERROR: Can not delete product info file for file %s "
-                             "due to error: %s", file, e)
+                logger.warning("WARNING: Cannot delete product info file for file %s "
+                               "due to error: %s", file, e)
                 return False
 
     def __path_handler_count_wrapper(
diff --git a/tests/test_s3client.py b/tests/test_s3client.py
index 7100c8cf..dde8ab49 100644
--- a/tests/test_s3client.py
+++ b/tests/test_s3client.py
@@ -14,7 +14,6 @@
 limitations under the License.
 """
 from typing import List
-
 from boto3_type_annotations import s3
 from charon.storage import S3Client, CHECKSUM_META_KEY
 from charon.utils.archive import extract_zip_all
@@ -375,16 +374,21 @@ def test_exists_override_failing(self):
             product="apache-commons", root=temp_root
         )
         self.assertEqual(0, len(failed_paths))
+        sha1 = read_sha1(all_files[0])
+        path = all_files[0][len(temp_root) + 1:]
         # Change content to make hash changes
         with open(all_files[0], "w+", encoding="utf-8") as f:
             f.write("changed content")
+        sha1_changed = read_sha1(all_files[0])
+        self.assertNotEqual(sha1, sha1_changed)
 
         failed_paths = self.s3_client.upload_files(
             all_files, targets=[(MY_BUCKET, None)],
             product="apache-commons-2", root=temp_root
         )
-        self.assertEqual(1, len(failed_paths))
-        self.assertIn(failed_paths[0], all_files[0])
+        bucket = self.mock_s3.Bucket(MY_BUCKET)
+        file_obj = bucket.Object(path)
+        self.assertEqual(sha1, file_obj.metadata[CHECKSUM_META_KEY])
 
     def __prepare_files(self):
         test_zip = zipfile.ZipFile(
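
Reviewer note (not part of the patch itself): the behavioral change is easiest to see in isolation. The sketch below is a minimal, hypothetical illustration of the check that handle_existed performs after this patch, not charon's actual code; the helper names sha1_of and can_update_prod_info are invented for the example, and only CHECKSUM_META_KEY mirrors an identifier from charon.storage. The point is that a checksum mismatch on an already-existing file now logs a warning and skips the product-info update, instead of appending the file to the failed-paths list and failing the run.

import hashlib
import logging

logger = logging.getLogger(__name__)
CHECKSUM_META_KEY = "checksum"  # assumption: mirrors the metadata key used in charon.storage


def sha1_of(path: str) -> str:
    # Hash the local file so it can be compared with the checksum stored
    # in the existing S3 object's user metadata.
    with open(path, "rb") as f:
        return hashlib.sha1(f.read()).hexdigest()


def can_update_prod_info(local_path: str, remote_meta: dict,
                         path_key: str, bucket_name: str, product: str) -> bool:
    """Return True only when the existing S3 object matches the local file."""
    checksum = remote_meta.get(CHECKSUM_META_KEY, "")
    if checksum and checksum.strip() != sha1_of(local_path):
        # Before this patch: logger.error(...) plus failed_paths.append(...),
        # which made the whole upload report a failure. Now it is a warning,
        # and the caller simply leaves the existing object untouched.
        logger.warning(
            "Warning: checksum check failed. The file %s is different "
            "from the one in S3 bucket %s. Product: %s",
            path_key, bucket_name, product,
        )
        return False
    return True

Under this model the second upload in test_exists_override_failing no longer produces a failed path, and the stored checksum metadata keeps its original value, which is exactly what the reworked test assertions verify.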