Postman gives error 500 while trying to connect backend via an api

I made an application that deletes stuff in a s3 bucket.Now I need to bring it into the frotend via an api but postman gives me error 500.I think something is wrong with my api,but I can't figure it out.Is there anything that I'm missing in the api? This is my app.

import pymysql
import os
from dotenv import load_dotenv
import boto3
import pandas as pd
import requests
from datetime import datetime


def bulk_delete():

    load_dotenv()

    # MySQL connection
    db = pymysql.connect(
        host=os.getenv("DATABASE_HOST"),
        user=os.getenv("DATABASE_USER"),
        password=os.getenv("DATABASE_PASS"),
        database=os.getenv("DATABASE_NAME"),
    )
    # get todays date in correct format
    today = datetime.today()

    getdatevalue = (
        str(today.year)
        + "-"
        + str(today.month)
        + "-"
        + str(today.day)
        + " "
        + str(today.hour)
        + ":"
        + str(today.minute)
        + ":"
        + str(today.second)
    )

    # session setup
    session = boto3.Session(
        aws_access_key_id="/",
        aws_secret_access_key="/",
    )
    s3 = session.resource("s3")
    my_bucket = s3.Bucket("slm-bulk-delete")

    # get files from S3 Bucket
    for obj in my_bucket.objects.all():
        s3_file_name = obj.key
        download_f_name = os.path.join("", s3_file_name)
        my_bucket.download_file(Key=s3_file_name, Filename=download_f_name)

        df = pd.read_excel(open(download_f_name, "rb"))
        print(df)

        # cycle through files
        for row in df.iterrows():
            print(row[1]["Data_Source_ID"])
            # cycle through ID's in file and query to check that ID exists
            DataID = str(row[1]["Data_Source_ID"])

            querycheck = (
                "select Data_Source_ID from FPFA_Data "
                + "Where Data_Source_ID = "
                + "'"
                + DataID
                + "' "
                + "Limit 1"
            )
            # check this is a reference in the DB
            with db.cursor() as cur:
                cur.execute(querycheck)
                for item in cur:
                    print("we have a reference!")
                    # If the ID exists, run update statement to add timestamp end
                    with db.cursor() as curupdate:
                        queryupdate = (
                            "update FPFA_Data set TimeStamp_End = "
                            + "'"
                            + getdatevalue
                            + "' "
                            + "where Data_Source_ID = "
                            + "'"
                            + DataID
                            + "'"
                        )
                        curupdate.execute(queryupdate)
                        db.commit()

    # Move file to short term storage
    obj.put(Bucket="shortterm-storage", Key="BulkDelete/" + obj.key)

    # delete file from slm-bulk-delete
    obj.delete()

    # delete locally downloaded file
    os.remove(download_f_name)

    # commit and close the database connection
    db.commit()

This is my api.

@require_GET
def BulkDelete(request):
    # pass data from front
    def run_bulk_delete():
        filename = request.GET.get("filename")
        return filename

    def run_code():
        # imported get_file_from_s3
        from .BulkDelete import bulk_delete

        filename = run_bulk_delete()
        bulk_delete()

        return {"filename": filename, "status": "files successfully deleted"}

    result = run_code()

    return JsonResponse(result, safe=False)

This is my url in urls.py

path("bulk_delete/", views.BulkDelete)
How many English words
do you know?
Test your English vocabulary size, and measure
how many words do you know
Online Test
Powered by Examplum