mirror of
				https://github.com/minio/minio.git
				synced 2025-10-31 08:11:19 +01:00 
			
		
		
		
	With this change, MinIO's ILM supports transitioning objects to a remote tier. This change includes support for Azure Blob Storage, AWS S3 compatible object storage incl. MinIO and Google Cloud Storage as remote tier storage backends. Some new additions include: - Admin APIs remote tier configuration management - Simple journal to track remote objects to be 'collected' This is used by object API handlers which 'mutate' object versions by overwriting/replacing content (Put/CopyObject) or removing the version itself (e.g DeleteObjectVersion). - Rework of previous ILM transition to fit the new model In the new model, a storage class (a.k.a remote tier) is defined by the 'remote' object storage type (one of s3, azure, GCS), bucket name and a prefix. * Fixed bugs, review comments, and more unit-tests - Leverage inline small object feature - Migrate legacy objects to the latest object format before transitioning - Fix restore to particular version if specified - Extend SharedDataDirCount to handle transitioned and restored objects - Restore-object should accept version-id for version-suspended bucket (#12091) - Check if remote tier creds have sufficient permissions - Bonus minor fixes to existing error messages Co-authored-by: Poorna Krishnamoorthy <poorna@minio.io> Co-authored-by: Krishna Srinivas <krishna@minio.io> Signed-off-by: Harshavardhana <harsha@minio.io>
		
			
				
	
	
		
			38 lines
		
	
	
		
			1.2 KiB
		
	
	
	
		
			Python
		
	
	
	
	
	
			
		
		
	
	
			38 lines
		
	
	
		
			1.2 KiB
		
	
	
	
		
			Python
		
	
	
	
	
	
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import logging

import boto3
from boto3.session import Session
from botocore.session import get_session

from client_grants import ClientGrantsCredentialProvider

# Log every resource-level boto3 action below at DEBUG so the demo's
# S3 calls are visible on the console.
boto3.set_stream_logger('boto3.resources', logging.DEBUG)

# Build a raw botocore session and slot the client-grants (STS) credential
# provider ahead of the environment provider, so it is consulted first
# when the credential chain is resolved.
core_session = get_session()
credential_chain = core_session.get_component('credential_provider')
credential_chain.insert_before(
    'env',
    ClientGrantsCredentialProvider('NZLOOFRSluw9RfIkuHGqfk1HFp4a',
                                   '0Z4VTG8uJBSekn42HE40DK9vQb4a'),
)

# Wrap the customized botocore session in a boto3 session and point the
# S3 resource at the local MinIO server.
session = Session(botocore_session=core_session)
s3 = session.resource('s3', endpoint_url='http://localhost:9000')

# Stream /etc/hosts into the bucket with server-side encryption enabled,
# authenticating via the temporary client-grants credentials.
with open('/etc/hosts', 'rb') as data:
    s3.meta.client.upload_fileobj(
        data,
        'testbucket',
        'hosts',
        ExtraArgs={'ServerSideEncryption': 'AES256'})

# Upload with server side encryption, using temporary credentials
s3.meta.client.upload_file(
    '/etc/hosts',
    'testbucket',
    'hosts',
    ExtraArgs={'ServerSideEncryption': 'AES256'})

# Download encrypted object using temporary credentials
s3.meta.client.download_file('testbucket', 'hosts', '/tmp/hosts')