kristofer revised this gist.
1 file changed, 0 insertions, 0 deletions
gistfile1.txt renamed to S3access.md
File renamed without changes
kristofer revised this gist.
2 files changed, 281 insertions
another-access-script.py (file created)
import boto3
from botocore.exceptions import ClientError, NoCredentialsError
from pathlib import Path


class S3BucketManager:
    def __init__(self, bucket_name, region_name='us-east-1'):
        """
        Initialize S3 client and set bucket name

        Args:
            bucket_name (str): Name of the S3 bucket
            region_name (str): AWS region name (default: us-east-1)
        """
        self.bucket_name = bucket_name
        self.region_name = region_name

        try:
            # Initialize S3 client and resource handles
            self.s3_client = boto3.client('s3', region_name=region_name)
            self.s3_resource = boto3.resource('s3', region_name=region_name)
            self.bucket = self.s3_resource.Bucket(bucket_name)
        except NoCredentialsError:
            print("Error: AWS credentials not found. Please configure your credentials.")
            raise
        except Exception as e:
            print(f"Error initializing S3 client: {e}")
            raise

    def list_objects(self, prefix=''):
        """
        List all objects in the bucket with optional prefix filter

        Args:
            prefix (str): Optional prefix to filter objects

        Returns:
            list: List of dicts with 'key', 'size', and 'last_modified'
        """
        try:
            objects = []
            paginator = self.s3_client.get_paginator('list_objects_v2')
            pages = paginator.paginate(Bucket=self.bucket_name, Prefix=prefix)

            for page in pages:
                if 'Contents' in page:
                    for obj in page['Contents']:
                        objects.append({
                            'key': obj['Key'],
                            'size': obj['Size'],
                            'last_modified': obj['LastModified']
                        })

            return objects
        except ClientError as e:
            print(f"Error listing objects: {e}")
            return []

    def upload_file(self, local_file_path, s3_key=None):
        """
        Upload a file to S3 bucket

        Args:
            local_file_path (str): Path to local file
            s3_key (str): S3 object key (if None, uses filename)

        Returns:
            bool: True if successful, False otherwise
        """
        if s3_key is None:
            s3_key = Path(local_file_path).name

        try:
            self.s3_client.upload_file(local_file_path, self.bucket_name, s3_key)
            print(f"Successfully uploaded {local_file_path} to s3://{self.bucket_name}/{s3_key}")
            return True
        except FileNotFoundError:
            print(f"Error: File {local_file_path} not found")
            return False
        except ClientError as e:
            print(f"Error uploading file: {e}")
            return False

    def download_file(self, s3_key, local_file_path):
        """
        Download a file from S3 bucket

        Args:
            s3_key (str): S3 object key
            local_file_path (str): Local path to save file

        Returns:
            bool: True if successful, False otherwise
        """
        try:
            self.s3_client.download_file(self.bucket_name, s3_key, local_file_path)
            print(f"Successfully downloaded s3://{self.bucket_name}/{s3_key} to {local_file_path}")
            return True
        except ClientError as e:
            print(f"Error downloading file: {e}")
            return False

    def delete_object(self, s3_key):
        """
        Delete an object from S3 bucket

        Args:
            s3_key (str): S3 object key to delete

        Returns:
            bool: True if successful, False otherwise
        """
        try:
            self.s3_client.delete_object(Bucket=self.bucket_name, Key=s3_key)
            print(f"Successfully deleted s3://{self.bucket_name}/{s3_key}")
            return True
        except ClientError as e:
            print(f"Error deleting object: {e}")
            return False

    def get_object_url(self, s3_key, expiration=3600):
        """
        Generate a presigned URL for an S3 object

        Args:
            s3_key (str): S3 object key
            expiration (int): URL expiration time in seconds (default: 1 hour)

        Returns:
            str: Presigned URL or None if error
        """
        try:
            url = self.s3_client.generate_presigned_url(
                'get_object',
                Params={'Bucket': self.bucket_name, 'Key': s3_key},
                ExpiresIn=expiration
            )
            return url
        except ClientError as e:
            print(f"Error generating presigned URL: {e}")
            return None

    def object_exists(self, s3_key):
        """
        Check if an object exists in the bucket

        Args:
            s3_key (str): S3 object key

        Returns:
            bool: True if object exists, False otherwise
        """
        try:
            self.s3_client.head_object(Bucket=self.bucket_name, Key=s3_key)
            return True
        except ClientError as e:
            if e.response['Error']['Code'] == '404':
                return False
            else:
                print(f"Error checking object existence: {e}")
                return False


# Example usage
if __name__ == "__main__":
    # Replace with your bucket name
    BUCKET_NAME = "your-bucket-name"
    REGION = "us-east-1"  # Replace with your region

    # Initialize S3 manager
    s3_manager = S3BucketManager(BUCKET_NAME, REGION)

    # List objects in bucket
    print("Objects in bucket:")
    objects = s3_manager.list_objects()
    for obj in objects:
        print(f"  {obj['key']} ({obj['size']} bytes)")

    # Example operations (uncomment to use):

    # Upload a file
    # s3_manager.upload_file("local_file.txt", "folder/remote_file.txt")

    # Download a file
    # s3_manager.download_file("folder/remote_file.txt", "downloaded_file.txt")

    # Check if object exists
    # if s3_manager.object_exists("folder/remote_file.txt"):
    #     print("Object exists!")

    # Generate presigned URL
    # url = s3_manager.get_object_url("folder/remote_file.txt", expiration=7200)
    # print(f"Presigned URL: {url}")

    # Delete an object
    # s3_manager.delete_object("folder/remote_file.txt")
gistfile1.txt (file created)
This gist contains both the Python code to access an S3 bucket and the IAM policy JSON for the bucket permissions.

Below is an IAM policy JSON that you can attach to your AWS IAM user or role to grant the necessary permissions for S3 bucket access:

## IAM Policy JSON for S3 Bucket Access

```json
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Sid": "ListBucketPermission",
            "Effect": "Allow",
            "Action": [
                "s3:ListBucket"
            ],
            "Resource": "arn:aws:s3:::your-bucket-name"
        },
        {
            "Sid": "ObjectLevelPermissions",
            "Effect": "Allow",
            "Action": [
                "s3:GetObject",
                "s3:PutObject",
                "s3:DeleteObject",
                "s3:GetObjectAcl"
            ],
            "Resource": "arn:aws:s3:::your-bucket-name/*"
        }
    ]
}
```

## Setup Instructions

### 1. Install Required Python Package
```bash
pip install boto3
```

### 2. Configure AWS Credentials
You have several options to configure AWS credentials:

**Option A: AWS CLI Configuration**
```bash
aws configure
```

**Option B: Environment Variables**
```bash
export AWS_ACCESS_KEY_ID=your_access_key
export AWS_SECRET_ACCESS_KEY=your_secret_key
export AWS_DEFAULT_REGION=us-east-1
```

**Option C: IAM Role (if running on EC2)**
Attach an IAM role with the above policy to your EC2 instance.
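
Whichever option you choose, you can verify which identity boto3 actually resolved before running the script. A minimal sketch using STS (no bucket access required; the output reflects whatever credentials the default provider chain finds):

```python
import boto3

# The default credential provider chain covers all three options above:
# CLI profiles, environment variables, and EC2 instance roles.
sts = boto3.client('sts')
identity = sts.get_caller_identity()
print(f"Account:    {identity['Account']}")
print(f"Caller ARN: {identity['Arn']}")
```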

### 3. Apply the IAM Policy

Replace `your-bucket-name` in the policy JSON with your actual bucket name, then:

1. **For IAM User**: Go to IAM → Users → Select your user → Permissions → Add permissions → Create policy → JSON tab → Paste the policy
2. **For IAM Role**: Go to IAM → Roles → Select your role → Permissions → Add permissions → Create policy → JSON tab → Paste the policy
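
If you prefer to attach the policy programmatically instead of through the console, here is a minimal sketch using boto3's IAM client. The user name, policy name, and `s3-policy.json` path are placeholder assumptions; the policy document is the JSON above saved to a file:

```python
import boto3

iam = boto3.client('iam')

# Read the policy JSON from the section above (the file path is an assumption).
with open('s3-policy.json') as f:
    policy_document = f.read()

# Attach it as an inline policy; both names below are hypothetical placeholders.
iam.put_user_policy(
    UserName='your-iam-user',
    PolicyName='S3BucketAccessPolicy',
    PolicyDocument=policy_document,
)
```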

### 4. Usage
Replace `BUCKET_NAME` in the Python code with your actual bucket name and run the script.
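
As a sketch, assuming you save the script under an importable name such as `s3_access.py` (the original file name `another-access-script.py` contains hyphens, so Python cannot import it directly):

```python
from s3_access import S3BucketManager  # module name is an assumption

s3_manager = S3BucketManager("your-bucket-name", region_name="us-east-1")
for obj in s3_manager.list_objects(prefix="folder/"):
    print(obj["key"], obj["size"])
```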

## Policy Permission Breakdown

- **`s3:ListBucket`**: Allows listing objects in the bucket
- **`s3:GetObject`**: Allows downloading/reading objects
- **`s3:PutObject`**: Allows uploading objects
- **`s3:DeleteObject`**: Allows deleting objects
- **`s3:GetObjectAcl`**: Allows reading object ACLs (needed for some presigned URL operations)
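
When one of these permissions is missing, boto3 raises a `ClientError` whose error code tells you what went wrong. A small sketch distinguishing a permission problem from a missing object (bucket and key are placeholders; note that `head_object` reports bare HTTP codes like `403`/`404`):

```python
import boto3
from botocore.exceptions import ClientError

s3 = boto3.client('s3')
try:
    s3.head_object(Bucket='your-bucket-name', Key='folder/remote_file.txt')
except ClientError as e:
    code = e.response['Error']['Code']
    if code in ('403', 'AccessDenied'):
        print("Access denied: check the IAM policy above")
    elif code in ('404', 'NoSuchKey'):
        print("Object does not exist")
    else:
        raise
```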

## Security Notes

- The policy grants access only to the specified bucket
- For production use, consider adding IP restrictions or MFA requirements
- You can restrict access to specific folder paths by modifying the `Resource` ARN (e.g., `arn:aws:s3:::your-bucket-name/specific-folder/*`)

This setup will give you full programmatic access to your S3 bucket with the Python code provided!
kristofer revised this gist.
2 files changed, 91 insertions
s3example.java (file created)
import com.amazonaws.HttpMethod;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.*;
import java.io.File;
import java.net.URL;
import java.util.Date;

public class S3Examples {

    private final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();

    // Create an S3 bucket
    public void createBucket(String bucketName) {
        s3.createBucket(bucketName);
    }

    // Upload an object to S3
    public void uploadObject(String bucketName, String key, File file) {
        s3.putObject(bucketName, key, file);
    }

    // Download an object from S3
    public void downloadObject(String bucketName, String key, File file) {
        s3.getObject(new GetObjectRequest(bucketName, key), file);
    }

    // List objects in a bucket
    public ObjectListing listObjects(String bucketName) {
        return s3.listObjects(bucketName);
    }

    // Generate pre-signed URL to share an S3 object
    public URL generatePresignedUrl(String bucketName, String key) {
        Date expiration = new Date();
        long expTimeMillis = expiration.getTime();
        expTimeMillis += 1000 * 60 * 60; // Add 1 hour.
        expiration.setTime(expTimeMillis);

        GeneratePresignedUrlRequest generatePresignedUrlRequest =
                new GeneratePresignedUrlRequest(bucketName, key)
                        .withMethod(HttpMethod.GET)
                        .withExpiration(expiration);

        return s3.generatePresignedUrl(generatePresignedUrlRequest);
    }
}
// The key steps are:

// 1. Build an AmazonS3 client to interact with S3
// 2. Call createBucket() to create a new S3 bucket
// 3. Upload objects using putObject(), specifying bucket name, key, and file
// 4. Download objects using getObject(), specifying bucket name, key, and target file
// 5. List objects in a bucket using listObjects()
// 6. Generate a pre-signed URL using generatePresignedUrl() by specifying bucket,
//    key, expiration date, and HTTP method.

// The pre-signed URL allows temporary access to private S3 objects without requiring AWS credentials.
// The URL is only valid until the specified expiration date/time.
s3example.py (file created)
import boto3
from botocore.exceptions import ClientError

s3 = boto3.client('s3')

# Create a bucket. In us-east-1 you must omit CreateBucketConfiguration
# ('us-east-1' is not a valid LocationConstraint); for any other region, pass
# CreateBucketConfiguration={'LocationConstraint': '<region>'}.
bucket_name = 'my-bucket'
try:
    s3.create_bucket(Bucket=bucket_name)
except ClientError as e:
    print(f"Error creating bucket: {e}")

# Upload an object to bucket
filename = 'data.csv'
object_name = 'data/data.csv'
s3.upload_file(filename, bucket_name, object_name)

# List objects in bucket
response = s3.list_objects_v2(Bucket=bucket_name)
for obj in response.get('Contents', []):
    print(obj['Key'])

# Download an object
s3.download_file(bucket_name, object_name, 'data_download.csv')

# Generate pre-signed URL to share an object
url = s3.generate_presigned_url(
    ClientMethod='get_object',
    Params={'Bucket': bucket_name, 'Key': object_name},
    ExpiresIn=3600)

print(url)

# This creates an S3 bucket, uploads a local CSV file to the bucket under object name 'data/data.csv',
# lists all objects in the bucket, downloads the object to a local file, and generates a pre-signed URL
# that allows temporary access to download the object for anyone with the URL.
# The pre-signed URL expires after 3600 seconds (1 hour).
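
# Because the signature is embedded in the URL itself, anyone holding it can fetch
# the object with a plain HTTP GET and no AWS credentials. A minimal sketch using
# only the standard library (reuses the 'url' generated above while it is valid):
import urllib.request

with urllib.request.urlopen(url) as resp:
    body = resp.read()
print(f"Fetched {len(body)} bytes via the pre-signed URL")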