What is Boto 3?
Boto3 is the AWS SDK for Python, used to create, configure, and manage AWS services such as Amazon Elastic Compute Cloud (Amazon EC2) and Amazon Simple Storage Service (Amazon S3). The SDK provides an object-oriented API as well as low-level access to AWS services.
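In Boto3, the low-level access comes from clients and the object-oriented API from resources. Here is a minimal sketch contrasting the two, assuming your AWS credentials are already configured (for example via aws configure):
import boto3

# low-level client: a thin wrapper over the AWS APIs that returns plain dicts
s3_client = boto3.client("s3")
print(s3_client.list_buckets()["Buckets"])

# object-oriented resource: higher-level objects with attributes and actions
s3_resource = boto3.resource("s3")
for bucket in s3_resource.buckets.all():
    print(bucket.name)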
For our project, we will load the values of different variables from a JSON file and also save the output in JSON format. For that, create a json_operations.py file and add the below code in it:
import json
import os

# load a dict from a JSON file; return an empty dict if the file
# is missing or cannot be decoded
def loadJsonData(file_path):
    blank_json = {}
    if os.path.exists(file_path):
        try:
            with open(file_path, "r") as f:
                data = json.load(f)
            return data
        except ValueError:
            print("Decode error")
            return blank_json
    else:
        return blank_json

# serialize a dict and write it to a JSON file
def saveJsonData(file_path, data):
    stringified_json_data = json.dumps(data)
    with open(file_path, "w") as f:
        f.write(stringified_json_data)
    return True
Playing with AWS EC2:
Now let's create an EC2 instance using Boto3 and play with it.
For operations related to EC2 instances, create an aws_ec2.py file; we will use that file throughout!
Step 1: Create a config file
- Create a config.json file and add the below code in it; later on we will load the data from this file. Note that the code below expects it at ./configs/config.json, so place it inside a configs folder.
{
    "key_path": "./aws_ec2_key.pem",
    "key_name": "ec2-key-pair",
    "ami_id": "ami-0a606d8395a538502",
    "instance_type": "t2.micro",
    "region_name": "us-east-2",
    "ec2_data_path": "./configs/ec2_data.json"
}
Step 2: Create a data file for an EC2 instance
- Create an ec2_data.json file inside the configs folder (the config above points to ./configs/ec2_data.json) and keep it blank!
Step 3: Create AWS key pair for an EC2 instance
- Add the below code in the aws_ec2.py file:
import os
import boto3
import json_operations
import json

# loading data from JSON
config_data = json_operations.loadJsonData("./configs/config.json")
key_path = config_data["key_path"]
key_name = config_data["key_name"]
ami_id = config_data["ami_id"]
instance_type = config_data["instance_type"]
region_name = config_data["region_name"]
ec2_json_data_path = config_data["ec2_data_path"]
ec2_data = json_operations.loadJsonData(ec2_json_data_path)

ec2_client = boto3.client("ec2", region_name=region_name)

# create key_pair for EC2 instance
def create_key_pair():
    if not os.path.exists(key_path):
        key_pair = ec2_client.create_key_pair(KeyName=key_name)
        private_key = key_pair["KeyMaterial"]
        # writing the key & restricting its permissions to 400
        with os.fdopen(os.open(key_path, os.O_WRONLY | os.O_CREAT, 0o400), "w") as handle:
            handle.write(private_key)

create_key_pair()
- The above code will create an AWS key pair & save the private key locally with its permission set to 400
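If you want to confirm the key pair was actually registered on the AWS side, you can look it up with describe_key_pairs. A minimal sketch (the key_pair_exists helper is just for illustration):
from botocore.exceptions import ClientError

# confirm the key pair exists in the target region
def key_pair_exists():
    try:
        ec2_client.describe_key_pairs(KeyNames=[key_name])
        return True
    except ClientError:
        return False

print(key_pair_exists())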
Step 4: Create an EC2 instance
- Add the below code in the aws_ec2.py file:
# create EC2 Instance
def create_instance():
    instances = ec2_client.run_instances(
        ImageId=ami_id,
        MinCount=1,
        MaxCount=1,
        InstanceType=instance_type,
        KeyName=key_name
    )
    instance_id = instances["Instances"][0]["InstanceId"]
    print(instance_id)
    if "ec2_instance_ids" in ec2_data:
        ec2_data["ec2_instance_ids"].append(instance_id)
    else:
        ec2_data["ec2_instance_ids"] = [instance_id]
    # persist the instance id so it can be reused later
    json_operations.saveJsonData(ec2_json_data_path, ec2_data)

create_instance()
The above code will create a single EC2 instance on AWS
In order to create multiple instances at the same time, you can add the below code:
for x in range(5):
    create_instance()
The above code will create 5 EC2 instances. The instance IDs of all the instances will be stored in the ec2_data.json file.
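If you want to block until the new instances are actually running before using them, EC2 exposes waiters. A minimal sketch, assuming the instance IDs were saved into ec2_data by create_instance above (the wait_for_running helper is just for illustration):
# wait until every instance recorded in ec2_data reaches the "running" state
def wait_for_running(instance_ids):
    waiter = ec2_client.get_waiter("instance_running")
    waiter.wait(InstanceIds=instance_ids)
    print("All instances are running")

wait_for_running(ec2_data.get("ec2_instance_ids", []))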
Step 5: Get the public & private IP of an EC2 instance
# getting the public IP of instance
def get_public_ip(instance_id):
    reservations = ec2_client.describe_instances(InstanceIds=[instance_id]).get("Reservations")
    for reservation in reservations:
        for instance in reservation["Instances"]:
            print(instance.get("PublicIpAddress"))

get_public_ip("<instance-id>")

# getting the private IP of instance
def get_private_ip(instance_id):
    reservations = ec2_client.describe_instances(InstanceIds=[instance_id]).get("Reservations")
    for reservation in reservations:
        for instance in reservation["Instances"]:
            print(instance.get("PrivateIpAddress"))

get_private_ip("<instance-id>")
- Replace the value of <instance-id> & run the code; you will get the public & private IP of the EC2 instance
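Instead of pasting the ID by hand, you could also loop over the IDs recorded in ec2_data.json, assuming the instances were created with create_instance above:
# print the public and private IPs of every instance we created
for iid in ec2_data.get("ec2_instance_ids", []):
    get_public_ip(iid)
    get_private_ip(iid)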
Step 6: Get the list of all running instances
# getting all running instances
def get_running_instances():
    reservations = ec2_client.describe_instances(Filters=[
        {
            "Name": "instance-state-name",
            "Values": ["running"],
        }
    ]).get("Reservations")
    for reservation in reservations:
        for instance in reservation["Instances"]:
            instance_id = instance["InstanceId"]
            instance_type = instance["InstanceType"]
            public_ip = instance["PublicIpAddress"]
            private_ip = instance["PrivateIpAddress"]
            print(f"{instance_id}, {instance_type}, {public_ip}, {private_ip}")

get_running_instances()
- Run the above code & you will get the instance id, instance type, and public & private IP addresses of all the running instances
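describe_instances returns results in pages, so accounts with many instances may need a paginator. A minimal sketch of the same listing with pagination (the function name is just for illustration):
# paginate over describe_instances so accounts with many instances are fully listed
def get_running_instances_paginated():
    paginator = ec2_client.get_paginator("describe_instances")
    pages = paginator.paginate(Filters=[
        {"Name": "instance-state-name", "Values": ["running"]}
    ])
    for page in pages:
        for reservation in page["Reservations"]:
            for instance in reservation["Instances"]:
                print(instance["InstanceId"], instance["InstanceType"])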
Step 7: Reboot, Stop, Start & Terminate the instance
# reboot instance
def reboot_instance(instance_id):
    response = ec2_client.reboot_instances(InstanceIds=[instance_id])
    print(response)

reboot_instance("<instance-id>")

# stop instance
def stop_instance(instance_id):
    response = ec2_client.stop_instances(InstanceIds=[instance_id])
    print(response)

stop_instance("<instance-id>")

# start instance
def start_instance(instance_id):
    response = ec2_client.start_instances(InstanceIds=[instance_id])
    print(response)

start_instance("<instance-id>")

# terminate instance
def terminate_instance(instance_id):
    response = ec2_client.terminate_instances(InstanceIds=[instance_id])
    print(response)
    # drop the terminated instance from ec2_data (if we recorded it) and persist the change
    if instance_id in ec2_data.get("ec2_instance_ids", []):
        ec2_data["ec2_instance_ids"].remove(instance_id)
        json_operations.saveJsonData(ec2_json_data_path, ec2_data)

terminate_instance("<instance-id>")
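Stop, start, and terminate are asynchronous; if you need to block until the state change completes, EC2 waiters can help here too. A minimal sketch using the same ec2_client (the wait_for_state helper is just for illustration):
# block until an instance reaches the requested state
def wait_for_state(instance_id, waiter_name):
    # valid waiter names include "instance_running", "instance_stopped",
    # and "instance_terminated"
    waiter = ec2_client.get_waiter(waiter_name)
    waiter.wait(InstanceIds=[instance_id])
    print(f"{instance_id} reached {waiter_name}")

# example: wait_for_state("<instance-id>", "instance_stopped")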
Let’s work with AWS S3:
Now let's work with AWS S3 using Boto3 and play with it.
Create a file called aws_s3.py
Step 1: Create the S3 bucket
- Add the below code in aws_s3.py:
import os
import logging
import json
import boto3
from botocore.exceptions import ClientError
from boto3.s3.transfer import TransferConfig
import requests

bucket_name = "dhsoni-boto3"
region_name = "us-east-2"

# creating bucket
def create_bucket(bucket_name, region=None):
    try:
        if region is None:
            s3_client = boto3.client('s3')
            s3_client.create_bucket(Bucket=bucket_name)
        else:
            s3_client = boto3.client('s3', region_name=region)
            location = {'LocationConstraint': region}
            s3_client.create_bucket(Bucket=bucket_name,
                                    CreateBucketConfiguration=location)
    except ClientError as e:
        logging.error(e)
        return False
    return True

create_bucket(bucket_name, region_name)
- Run the above code and it will create the S3 bucket called dhsoni-boto3
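If you want to confirm the bucket exists (or check before creating it), head_bucket is a cheap way to do that. A minimal sketch for aws_s3.py (the bucket_exists helper is just for illustration):
# returns True if the bucket exists and we have access to it
def bucket_exists(bucket):
    s3_client = boto3.client('s3')
    try:
        s3_client.head_bucket(Bucket=bucket)
        return True
    except ClientError as e:
        logging.error(e)
        return False

print(bucket_exists(bucket_name))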
Step 2: List all the buckets
# listing buckets
def list_buckets(region=None):
    s3_client = boto3.client('s3')
    try:
        if region is not None:
            s3_client = boto3.client('s3', region_name=region)
        response = s3_client.list_buckets()
        print('Existing buckets:')
        for bucket in response['Buckets']:
            print(f'  {bucket["Name"]}')
    except ClientError as e:
        logging.error(e)
        return False
    return True

list_buckets(region_name)
- Run the above code and it will list all the buckets
Step 3: Upload a file to the S3 bucket
- Create a file called sample_file.txt and add some random content to it
# uploading a file to bucket
def upload_file(file_name, bucket, object_name=None):
    if object_name is None:
        object_name = os.path.basename(file_name)
    s3_client = boto3.client('s3')
    try:
        response = s3_client.upload_file(file_name, bucket, object_name)
    except ClientError as e:
        logging.error(e)
        return False
    return True

upload_file("./sample_file.txt", bucket_name, "sample_file.txt")
- Run the above code and it will upload the file to the S3 bucket
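Once the object is in the bucket, you may want to share it without making it public; a presigned URL is one way to do that. A minimal sketch, assuming the object uploaded above (the get_presigned_url helper is just for illustration):
# generate a time-limited download URL for an object (valid for 1 hour by default)
def get_presigned_url(bucket, object_name, expires=3600):
    s3_client = boto3.client('s3')
    return s3_client.generate_presigned_url(
        'get_object',
        Params={'Bucket': bucket, 'Key': object_name},
        ExpiresIn=expires
    )

print(get_presigned_url(bucket_name, "sample_file.txt"))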
Step 4: Upload a file object
- Create a file called sample_file_2.txt and add some random content to it
# uploading a file object to bucket
def upload_file_object(file_name, bucket, object_name=None):
    if object_name is None:
        object_name = os.path.basename(file_name)
    # Upload the file
    s3_client = boto3.client('s3')
    try:
        with open(file_name, "rb") as f:
            s3_client.upload_fileobj(f, bucket, object_name)
    except ClientError as e:
        logging.error(e)
        return False
    return True

upload_file_object("./sample_file_2.txt", bucket_name, "sample_file_2.txt")
- Run the above code and it will upload the file object to the S3 bucket
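The TransferConfig import at the top of aws_s3.py is useful for large files: it lets you tune when boto3 switches to multipart uploads. A minimal sketch (the threshold values and the upload_large_file name are just examples):
# upload a large file with multipart transfers once it exceeds ~25 MB
def upload_large_file(file_name, bucket, object_name=None):
    if object_name is None:
        object_name = os.path.basename(file_name)
    config = TransferConfig(multipart_threshold=25 * 1024 * 1024,
                            multipart_chunksize=25 * 1024 * 1024)
    s3_client = boto3.client('s3')
    try:
        s3_client.upload_file(file_name, bucket, object_name, Config=config)
    except ClientError as e:
        logging.error(e)
        return False
    return True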
Step 5: Delete the empty bucket
- In order to delete an empty bucket, we first need to create one; run the below code for that:
create_bucket("dhsoni-empty-bucket", region_name)
- Now, in order to delete it, run the below code:
# deleting empty bucket
def delete_empty_bucket(bucket):
    s3_client = boto3.client('s3')
    response = s3_client.delete_bucket(Bucket=bucket)
    print(response)

delete_empty_bucket("dhsoni-empty-bucket")
Step 6: Delete the object from the bucket
# deleting object from bucket
def delete_object(bucket, object_name):
    s3_client = boto3.client('s3')
    response = s3_client.delete_object(Bucket=bucket, Key=object_name)

delete_object(bucket_name, "sample_file_2.txt")
- Run the above code and it will delete sample_file_2.txt from the dhsoni-boto3 bucket
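To confirm the object is gone, you can list what remains in the bucket. A minimal sketch (the list_objects helper name is just for illustration):
# list the keys that are still present in the bucket
def list_objects(bucket):
    s3_client = boto3.client('s3')
    response = s3_client.list_objects_v2(Bucket=bucket)
    for obj in response.get('Contents', []):
        print(obj['Key'])

list_objects(bucket_name)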
Step 7: Deleting a non-empty bucket
# deleting non-empty bucket
def delete_non_empty_bucket(bucket):
    s3_resource = boto3.resource('s3')
    bucketClient = s3_resource.Bucket(bucket)
    # delete every object first, then the bucket itself
    bucketClient.objects.all().delete()
    bucketClient.meta.client.delete_bucket(Bucket=bucket)

delete_non_empty_bucket(bucket_name)
- Run the above code and it will delete the dhsoni-boto3 bucket
Step 8: Downloading the file from the bucket
# downloading file from bucket
def download_file(file_name, bucket, object_name):
    s3_client = boto3.client('s3')
    try:
        response = s3_client.download_file(bucket, object_name, file_name)
    except ClientError as e:
        logging.error(e)
        return False
    return True

download_file("./downloaded_files/sample_file.txt", bucket_name, "sample_file.txt")
- Run the above code and it will download sample_file.txt and place it in the downloaded_files folder, which I have already created (note that this assumes the dhsoni-boto3 bucket from Step 7 has not been deleted yet, or has been re-created and the file re-uploaded)
Now, let's work with AWS IAM:
Now let's work with AWS IAM using Boto3 and play with it.
Create an aws_iam.py file
Step 1: Create an AWS IAM user
import json
import boto3
from botocore.exceptions import ClientError

# creating IAM User
def create_iam_user(user_name):
    try:
        iam_client = boto3.client('iam')
        response = iam_client.create_user(UserName=user_name)
    except ClientError as e:
        if e.response['Error']['Code'] == 'EntityAlreadyExists':
            print("Object already exists")
            return False
        else:
            print("Unexpected error: %s" % e)
            return False
    return response

responseObject = create_iam_user("DhsoniTest")
print(responseObject)
- Run the above code and it will create the DhsoniTest user on AWS
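To double-check the user was created, you can fetch it back with get_user. A minimal sketch (the get_iam_user helper name is just for illustration):
# fetch the newly created user and print its ARN
def get_iam_user(user_name):
    iam_client = boto3.client('iam')
    response = iam_client.get_user(UserName=user_name)
    print(response['User']['Arn'])

get_iam_user("DhsoniTest")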
Step 2: List all IAM users
# listing all IAM users
def list_iam_users():
    try:
        iam_client = boto3.client('iam')
        paginator = iam_client.get_paginator('list_users')
        for response in paginator.paginate():
            for user in response["Users"]:
                print("User name: ", user["UserName"])
    except ClientError as e:
        if e.response['Error']['Code'] == 'EntityAlreadyExists':
            print("Object already exists")
        else:
            print("Unexpected error: %s" % e)

list_iam_users()
- Run the above code and it will list all the IAM users
Step 3: Update the IAM user
# updating IAM user
def update_iam_user(existing_user_name, new_user_name):
    try:
        iam_client = boto3.client('iam')
        iam_client.update_user(UserName=existing_user_name,
                               NewUserName=new_user_name)
    except ClientError as e:
        if e.response['Error']['Code'] == 'EntityAlreadyExists':
            print("Object already exists")
        else:
            print("Unexpected error: %s" % e)

update_iam_user("DhsoniTest", "DhsoniTest1")
list_iam_users()
- Run the above code and it will change the name of the IAM user from DhsoniTest to DhsoniTest1
Step 4: Create an IAM policy
# creating IAM policy
def create_iam_policy(policy_name, policy_json):
    try:
        iam_client = boto3.client('iam')
        iam_client.create_policy(
            PolicyName=policy_name,
            PolicyDocument=json.dumps(policy_json)
        )
    except ClientError as e:
        if e.response['Error']['Code'] == 'EntityAlreadyExists':
            print("Object already exists")
            return False
        else:
            print("Unexpected error: %s" % e)
            return False
    return True

custom_policy_json = {
    "Version": "2012-10-17",
    "Statement": [{
        "Effect": "Allow",
        "Action": [
            "ec2:*"
        ],
        "Resource": "*"
    }]
}

create_iam_policy("test_policy_1_by_dhruvin", custom_policy_json)
- Run the above code and it will create an IAM policy that grants full access to Amazon EC2 (ec2:* on all resources)
Step 5: Attaching IAM policy to IAM user
# attaching IAM policy to user
def attach_custom_iam_policy_with_user(policy_name, user_name):
    try:
        sts = boto3.client('sts')
        account_id = sts.get_caller_identity()['Account']
        policy_arn = f'arn:aws:iam::{account_id}:policy/{policy_name}'
        iam_client = boto3.client('iam')
        iam_client.attach_user_policy(
            UserName=user_name,
            PolicyArn=policy_arn
        )
    except ClientError as e:
        if e.response['Error']['Code'] == 'EntityAlreadyExists':
            print("Object already exists")
        else:
            print("Unexpected error: %s" % e)

attach_custom_iam_policy_with_user("test_policy_1_by_dhruvin", "DhsoniTest1")
- Run the above code and it will attach the IAM policy test_policy_1_by_dhruvin to the IAM user DhsoniTest1
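To verify what is attached to the user, you can list its attached managed policies. A minimal sketch (the list_attached_policies helper name is just for illustration):
# list the managed policies currently attached to a user
def list_attached_policies(user_name):
    iam_client = boto3.client('iam')
    response = iam_client.list_attached_user_policies(UserName=user_name)
    for policy in response['AttachedPolicies']:
        print(policy['PolicyName'], policy['PolicyArn'])

list_attached_policies("DhsoniTest1")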
Step 6: Attach AWS-managed IAM policy to IAM user
# attaching AWS managed IAM policy
def attach_managed_iam_policy_with_user(policy_name, user_name):
    try:
        sts = boto3.client('sts')
        policy_arn = f'arn:aws:iam::aws:policy/{policy_name}'
        iam_client = boto3.client('iam')
        iam_client.attach_user_policy(
            UserName=user_name,
            PolicyArn=policy_arn
        )
    except ClientError as e:
        if e.response['Error']['Code'] == 'EntityAlreadyExists':
            print("Object already exists")
        else:
            print("Unexpected error: %s" % e)

attach_managed_iam_policy_with_user("AdministratorAccess", "DhsoniTest1")
- Run the above code and it will attach the AdministratorAccess IAM policy to the DhsoniTest1 user
Step 7: Detach AWS-managed & custom IAM policies
# detaching custom IAM policy from user
def detach_custom_iam_policy_with_user(policy_name, user_name):
    try:
        sts = boto3.client('sts')
        account_id = sts.get_caller_identity()['Account']
        policy_arn = f'arn:aws:iam::{account_id}:policy/{policy_name}'
        iam_client = boto3.client('iam')
        iam_client.detach_user_policy(
            UserName=user_name,
            PolicyArn=policy_arn
        )
    except ClientError as e:
        if e.response['Error']['Code'] == 'EntityAlreadyExists':
            print("Object already exists")
        else:
            print("Unexpected error: %s" % e)

detach_custom_iam_policy_with_user("test_policy_1_by_dhruvin", "DhsoniTest1")

# detaching AWS managed IAM policy
def detach_managed_iam_policy_with_user(policy_name, user_name):
    try:
        sts = boto3.client('sts')
        policy_arn = f'arn:aws:iam::aws:policy/{policy_name}'
        iam_client = boto3.client('iam')
        iam_client.detach_user_policy(
            UserName=user_name,
            PolicyArn=policy_arn
        )
    except ClientError as e:
        if e.response['Error']['Code'] == 'EntityAlreadyExists':
            print("Object already exists")
        else:
            print("Unexpected error: %s" % e)

detach_managed_iam_policy_with_user("AdministratorAccess", "DhsoniTest1")
- Run the above code and it will detach the AWS-managed & custom IAM policies from the user DhsoniTest1
Step 8: Delete the IAM user
# deleting IAM User
def delete_iam_user(user_name):
    try:
        iam_client = boto3.client('iam')
        response = iam_client.delete_user(UserName=user_name)
    except ClientError as e:
        if e.response['Error']['Code'] == 'EntityAlreadyExists':
            print("Object already exists")
            return False
        else:
            print("Unexpected error: %s" % e)
            return False
    return response

responseObject = delete_iam_user("DhsoniTest1")
print(responseObject)
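Note that IAM refuses to delete a user that still has attached policies, access keys, or other associated resources. We already detached the policies in the previous step; if you had created access keys for the user, you would need to remove them first too. A minimal sketch (the delete_user_access_keys helper name is just for illustration):
# delete_user fails while the user still has access keys; remove them first
def delete_user_access_keys(user_name):
    iam_client = boto3.client('iam')
    keys = iam_client.list_access_keys(UserName=user_name)
    for key in keys['AccessKeyMetadata']:
        iam_client.delete_access_key(UserName=user_name,
                                     AccessKeyId=key['AccessKeyId'])

delete_user_access_keys("DhsoniTest1")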