# dbt Cloud Python script: load XML report data into Snowflake

from urllib.parse import quote_plus as urlquote
from snowflake.connector.pandas_tools import pd_writer
from sqlalchemy import create_engine
import logging
import requests
import pandas as pd
from datetime import datetime

# Set Snowflake connection parameters

# Snowflake connection parameters.
# NOTE(review): credentials are hard-coded placeholders here; in dbt Cloud (or
# any shared environment) read these from environment variables or a secrets
# manager instead of committing them to source control.
user = 'your_user_name'
password = 'your_password'
database = 'your_database_name'
warehouse = 'your_warehouse_name'
account = 'your_account_name'
role = 'your_role_name'

# URL-encode the password so special characters (@, /, :) cannot break the
# connection URL. The original paste used word-processor "smart quotes"
# (‘…’), which are a Python syntax error — replaced with ASCII quotes.
conn_string = (
    f'snowflake://{user}:{urlquote(password)}@{account}/{database}'
    f'?warehouse={warehouse}&role={role}'
)

# Create a connection to Snowflake

# Create a SQLAlchemy engine and keep ALL database work inside the
# connection's context manager so the connection is released even on failure.
# (In the original script the `with` block closed the connection immediately
# after `print`, then `to_sql` on a closed connection; L26 was also
# mis-indented, which is a SyntaxError under the `with`.)
engine = create_engine(conn_string)
with engine.connect() as con:
    print('connected')

    # Fetch the RAILPort XML report. `timeout` prevents the script from
    # hanging forever; `raise_for_status` fails fast on HTTP errors instead
    # of handing an error page to the XML parser.
    r = requests.get(
        'https://rp.railcl.com/RAILPort/report/data/28e3d782-e0ca-4e19-8573-09002e8e9c69?p=e',
        timeout=60,
    )
    r.raise_for_status()
    df_railtrac = pd.read_xml(r.content)

    # Normalize column names before loading: lowercase, and strip the
    # XML-escaped character sequences ('_x0020' is an escaped space,
    # 'x002f' a '/') plus any dots, so the names are valid Snowflake columns.
    fix_cols = [
        col.lower().replace('_x0020', '').replace('x002f', '').replace('.', '')
        for col in df_railtrac.columns
    ]
    print(fix_cols)
    df_railtrac.columns = fix_cols

    # Stamp every row with the load timestamp for lineage/debugging.
    df_railtrac['load_date'] = datetime.now()

    # Append into the staging table while the connection is still open.
    df_railtrac.to_sql(name='s_railtrac_livelink', con=con, if_exists='append', index=False)

# NOTE: this script is currently executed from a Jupyter notebook to load the
# XML data. Question for review: can the same logic be implemented as a
# dbt Cloud Python model?