# By: Riasat Ullah
# This file contains helper functions for working with data stored in S3.

from io import BytesIO

import boto3
import json
import yaml


# Module-level S3 resource shared by the helper functions below.
s3 = boto3.resource('s3')


def read_json(bucket, key):
    '''
    Reads an S3 object and parses it as JSON.
    :param bucket: S3 bucket name
    :param key: S3 object key
    :return: parsed JSON (dict or list)
    '''
    obj = s3.Object(bucket, key)
    return json.loads(obj.get()['Body'].read().decode('utf-8'))
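
# Example (illustrative): load a JSON config from S3. The bucket and key below
# are hypothetical placeholders.
#     config = read_json('my-bucket', 'configs/settings.json')
#     print(config.get('version'))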


def read_image(bucket, key):
    '''
    Reads an S3 image object and returns it as an in-memory buffer.
    :param bucket: S3 bucket name
    :param key: S3 object key
    :return: BytesIO
    '''
    obj = s3.Object(bucket, key)
    return BytesIO(obj.get()['Body'].read())


def read_audio(bucket, key):
    '''
    Reads an S3 audio object and returns it as an in-memory buffer.
    :param bucket: S3 bucket name
    :param key: S3 object key
    :return: BytesIO
    '''
    obj = s3.Object(bucket, key)
    return BytesIO(obj.get()['Body'].read())
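
# Example (illustrative): fetch an image or audio object into memory. The keys
# below are hypothetical placeholders; PIL is only needed for the image case.
#     from PIL import Image
#     img = Image.open(read_image('my-bucket', 'photos/cat.jpg'))
#     clip = read_audio('my-bucket', 'audio/sample.wav')  # BytesIO, pass to any decoder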


def read_text(bucket, key):
    '''
    Reads an S3 object and returns its contents as a UTF-8 string.
    :param bucket: S3 bucket name
    :param key: S3 object key
    :return: str
    '''
    obj = s3.Object(bucket, key)
    return obj.get()['Body'].read().decode('utf-8')


def read_yaml(bucket, key):
    '''
    Reads a YAML file stored in S3 and parses it.
    :param bucket: S3 bucket name
    :param key: S3 object key
    :return: dict
    '''
    obj = s3.Object(bucket, key)
    return yaml.safe_load(obj.get()['Body'].read().decode('utf-8'))
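
# Example (illustrative): read a YAML parameter file. The key and field names
# below are hypothetical placeholders.
#     params = read_yaml('my-bucket', 'configs/training.yaml')
#     print(params['learning_rate'])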


def read_bytes_io(bucket, key):
    '''
    Reads an S3 object and returns it as an in-memory BytesIO buffer
    (same result as read_image/read_audio).
    :param bucket: S3 bucket name
    :param key: S3 object key
    :return: BytesIO
    '''
    obj = s3.Object(bucket, key)
    return BytesIO(obj.get()['Body'].read())


def upload_media_file(bucket, key, media_file):
    '''
    Uploads a media file to S3.
    :param bucket: S3 bucket name
    :param key: destination S3 object key
    :param media_file: bytes or file-like object containing the media
    '''
    s3.Bucket(bucket).put_object(Key=key, Body=media_file)
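
# Example (illustrative): upload a local file's bytes. The paths below are
# hypothetical placeholders.
#     with open('local/photo.jpg', 'rb') as f:
#         upload_media_file('my-bucket', 'photos/photo.jpg', f.read())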


def list_bucket_keys(bucket, key, max_keys=1000):
    '''
    Lists the object keys under a prefix in a bucket.
    Note: a single list call is made, so at most 1000 keys are returned.
    :param bucket: S3 bucket name
    :param key: key prefix
    :param max_keys: maximum number of keys to return (capped at 1000 by S3)
    :return: (list) of S3 object keys
    '''
    s3_client = boto3.client('s3')
    if not key.endswith('/'):
        key = key + '/'

    response = s3_client.list_objects(Bucket=bucket, Prefix=key, MaxKeys=max_keys)
    if 'Contents' in response:
        # Exclude the prefix itself, which may appear as a zero-byte "folder" object.
        return [item['Key'] for item in response['Contents'] if item['Key'] != key]
    return []
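
# Example (illustrative): list keys under a prefix. The prefix below is a
# hypothetical placeholder.
#     keys = list_bucket_keys('my-bucket', 'datasets/images', max_keys=100)
#     for k in keys:
#         print(k)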


def get_first_file_key(bucket, key):
    '''
    Gets the key of the first file under a given prefix in a bucket.
    :param bucket: S3 bucket name
    :param key: key prefix
    :return: (str) first file key, or None if no files exist under the prefix
    '''
    all_keys = list_bucket_keys(bucket, key)
    if len(all_keys) == 0:
        return None
    return all_keys[0]


def delete_single_file(bucket, key):
    '''
    Deletes a single object from S3.
    :param bucket: S3 bucket name
    :param key: S3 object key
    '''
    s3_client = boto3.client('s3')
    s3_client.delete_object(Bucket=bucket, Key=key)


def delete_folder_files(bucket, key, files_to_keep=None):
    '''
    Deletes all files under an S3 prefix (folder).
    Note: only the first 1000 keys returned by a single list call are processed.
    :param bucket: S3 bucket name
    :param key: key prefix (folder)
    :param files_to_keep: keys that should not be deleted
    '''
    s3_client = boto3.client('s3')
    if not key.endswith('/'):
        key = key + '/'
    if files_to_keep is None:
        files_to_keep = []

    response = s3_client.list_objects(Bucket=bucket, Prefix=key)
    if 'Contents' in response:
        for item in response['Contents']:
            # Skip the folder placeholder itself and any explicitly kept keys.
            if item['Key'] != key and item['Key'] not in files_to_keep:
                s3_client.delete_object(Bucket=bucket, Key=item['Key'])
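
# Example (illustrative): clear a temp folder while keeping one manifest file.
# The keys below are hypothetical placeholders.
#     delete_folder_files('my-bucket', 'tmp/run-01',
#                         files_to_keep=['tmp/run-01/manifest.json'])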


def update_json(bucket, key, data):
    '''
    Writes a dict to S3 as JSON, overwriting any existing object at the key.
    :param bucket: S3 bucket name
    :param key: S3 object key
    :param data: (dict) data to serialize
    '''
    obj = s3.Object(bucket, key)
    obj.put(Body=json.dumps(data))

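
# Minimal usage sketch: the bucket name below is a hypothetical placeholder,
# and running it requires valid AWS credentials and an existing bucket.
if __name__ == '__main__':
    example_bucket = 'my-example-bucket'  # hypothetical bucket name
    # Write a small JSON document, read it back, then clean it up.
    update_json(example_bucket, 'tmp/example.json', {'status': 'ok'})
    print(read_json(example_bucket, 'tmp/example.json'))
    delete_single_file(example_bucket, 'tmp/example.json')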