diff --git a/charon/pkgs/maven.py b/charon/pkgs/maven.py
index 9c234612..41ecdef9 100644
--- a/charon/pkgs/maven.py
+++ b/charon/pkgs/maven.py
@@ -658,7 +658,15 @@ def _generate_rollback_archetype_catalog(
 
     # If there is no local catalog, this is a NO-OP
     if os.path.exists(local):
-        if not s3.file_exists_in_bucket(bucket, remote):
+        existed = False
+        try:
+            existed = s3.file_exists_in_bucket(bucket, remote)
+        except ValueError as e:
+            logger.error(
+                "Error: Cannot generate archetype-catalog.xml due to: %s", e
+            )
+            return 0
+        if not existed:
             # If there is no catalog in the bucket...this is a NO-OP
             return 0
         else:
@@ -758,7 +766,15 @@ def _generate_upload_archetype_catalog(
 
     # If there is no local catalog, this is a NO-OP
     if os.path.exists(local):
-        if not s3.file_exists_in_bucket(bucket, remote):
+        existed = False
+        try:
+            existed = s3.file_exists_in_bucket(bucket, remote)
+        except ValueError as e:
+            logger.error(
+                "Error: Cannot generate archetype-catalog.xml due to: %s", e
+            )
+            return False
+        if not existed:
             __gen_all_digest_files(local)
             # If there is no catalog in the bucket, just upload what we have locally
             return True
diff --git a/charon/storage.py b/charon/storage.py
index 1cc88398..92b35ca3 100644
--- a/charon/storage.py
+++ b/charon/storage.py
@@ -148,7 +148,15 @@ async def path_upload_handler(
 
             path_key = os.path.join(key_prefix, path) if key_prefix else path
             file_object: s3.Object = bucket.Object(path_key)
-            existed = await self.__run_async(self.__file_exists, file_object)
+            existed = False
+            try:
+                existed = await self.__run_async(self.__file_exists, file_object)
+            except (ClientError, HTTPClientError) as e:
+                logger.error(
+                    "Error: file existence check failed due to error: %s", e
+                )
+                failed.append(full_file_path)
+                return
             sha1 = read_sha1(full_file_path)
             (content_type, _) = mimetypes.guess_type(full_file_path)
             if not content_type:
@@ -262,7 +270,15 @@ async def path_upload_handler(
 
             path_key = os.path.join(key_prefix, path) if key_prefix else path
             file_object: s3.Object = bucket.Object(path_key)
-            existed = await self.__run_async(self.__file_exists, file_object)
+            existed = False
+            try:
+                existed = await self.__run_async(self.__file_exists, file_object)
+            except (ClientError, HTTPClientError) as e:
+                logger.error(
+                    "Error: file existence check failed due to error: %s", e
+                )
+                failed.append(full_file_path)
+                return
             f_meta = {}
             need_overwritten = True
             sha1 = read_sha1(full_file_path)
@@ -367,7 +383,15 @@ async def path_delete_handler(
             logger.debug('(%d/%d) Deleting %s from bucket %s', index, total, path, bucket_name)
             path_key = os.path.join(key_prefix, path) if key_prefix else path
             file_object = bucket.Object(path_key)
-            existed = await self.__run_async(self.__file_exists, file_object)
+            existed = False
+            try:
+                existed = await self.__run_async(self.__file_exists, file_object)
+            except (ClientError, HTTPClientError) as e:
+                logger.error(
+                    "Error: file existence check failed due to error: %s", e
+                )
+                failed.append(path)
+                return
             if existed:
                 # NOTE: If we're NOT using the product key to track collisions
                 # (in the case of metadata), then this prods array will remain
@@ -458,7 +482,15 @@ def delete_manifest(self, product_key: str, target: str, manifest_bucket_name: s
 
         manifest_bucket = self.__get_bucket(manifest_bucket_name)
         file_object: s3.Object = manifest_bucket.Object(path_key)
-        if self.__file_exists(file_object):
+        existed = False
+        try:
+            existed = self.__file_exists(file_object)
+        except (ClientError, HTTPClientError) as e:
+            logger.error(
+                "Error: file existence check failed due to error: %s", e
+            )
+            return
+        if existed:
             manifest_bucket.delete_objects(Delete={"Objects": [{"Key": path_key}]})
         else:
             logger.warning(
@@ -555,8 +587,7 @@ def __file_exists(self, file_object: Object) -> bool:
             if isinstance(e, ClientError) and e.response["Error"]["Code"] == "404":
                 return False
             else:
-                logger.error("Error: file existence check failed due "
-                             "to error: %s", e)
+                raise e
 
     def __get_prod_info(
         self, file: str, bucket_name: str
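
Reviewer note: the key behavioral change is in `__file_exists`. Before this patch, a non-404 `ClientError`/`HTTPClientError` was logged and the method fell through, implicitly returning `None`; callers treated that falsy result as "file does not exist" and carried on, so a transient 500 or a 403 during the HEAD request looked identical to a missing key. After the patch, the error propagates and each call site handles it explicitly (mark the path failed, or bail out with `0`/`False`). Below is a minimal, self-contained sketch of that contract using plain boto3/botocore; the helper names `file_exists` and `delete_if_exists` are illustrative only, not charon's actual code.

```python
# Sketch only: mirrors the error-handling contract this patch introduces.
# Requires boto3; `bucket` is any boto3 s3.Bucket resource.
import logging

from botocore.exceptions import ClientError, HTTPClientError

logger = logging.getLogger(__name__)


def file_exists(file_object) -> bool:
    """True if the object exists, False on a plain 404, raise anything else."""
    try:
        file_object.load()  # issues a HEAD request against S3
        return True
    except (ClientError, HTTPClientError) as e:
        # Only ClientError carries an HTTP status code we can inspect.
        if isinstance(e, ClientError) and e.response["Error"]["Code"] == "404":
            return False
        # 403, 500, throttling, broken connections, ... are real failures,
        # not "absent": let the caller decide how to fail.
        raise


def delete_if_exists(bucket, key: str) -> bool:
    """Caller side: a failed existence check aborts the operation loudly."""
    file_object = bucket.Object(key)
    try:
        existed = file_exists(file_object)
    except (ClientError, HTTPClientError) as e:
        logger.error("Error: file existence check failed due to error: %s", e)
        return False
    if existed:
        bucket.delete_objects(Delete={"Objects": [{"Key": key}]})
        return True
    logger.warning("Key %s does not exist in bucket, skipping deletion", key)
    return False
```

Under the old behavior, an S3 outage during the existence check could silently skip a deletion or trigger a spurious "new file" upload; under this contract it surfaces as a logged, per-path failure instead.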