
Commit 3ef6d91

Merge pull request #2 from Snowflake-Labs/initial-release-branch
Initial release branch
2 parents cb3984f + 0b7ab37 commit 3ef6d91

File tree: 3 files changed (+344, -0 lines changed)


.DS_Store

0 Bytes
Binary file not shown.

SiS Deployment/deploy_app.ipynb

Lines changed: 304 additions & 0 deletions
@@ -0,0 +1,304 @@
{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Deploy App to Snowflake"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "from snowflake.snowpark import Session\n",
    "from string import Template\n",
    "import json"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Connect to Snowflake"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "You can create a session however you like. Here are two possible options. "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "OPTION 1 - Using builder.getOrCreate() to access an existing toml file\n",
    "- https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect#connecting-using-the-connections-toml-file"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "session = Session.builder.getOrCreate()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "OPTION 2 - Using connection params inside builder.configs().create() "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# connection_params = dict(\n",
    "#     user=\"\",\n",
    "#     role=\"\",\n",
    "#     password=\"\",\n",
    "#     account=\"\",\n",
    "# )\n",
    "\n",
    "# session = Session.builder.configs(connection_params).create()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Establish metadata\n",
    "\n",
    "NOTE: In the metadata dict below, you can change any of the key:value pairs EXCEPT main_file=\"automl_app.py\"."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "metadata = dict(\n",
    "    database_name=\"ML_SIDEKICK\",\n",
    "    schema_name=\"ST_APPS\",\n",
    "    stage_name=\"APP_STG\",\n",
    "    app_name=\"ML_SIDEKICK\",\n",
    "    main_file=\"automl_app.py\",  # DO NOT CHANGE\n",
    "    query_warehouse=\"COMPUTE_WH\",  # CHANGE TO AN EXISTING WAREHOUSE\n",
    ")\n",
    "with open(\"deployment_structure.json\", \"r\") as config:\n",
    "    upload_metadata = json.loads(config.read())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Templates"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "create_db = Template(\"CREATE DATABASE IF NOT EXISTS $db\")\n",
    "create_schema = Template(\"CREATE SCHEMA IF NOT EXISTS $db.$schema\")\n",
    "create_stage = Template(\n",
    "    \"\"\"\n",
    "CREATE STAGE IF NOT EXISTS $db.$schema.$stage\n",
    "DIRECTORY=(ENABLE=TRUE);\n",
    "\"\"\"\n",
    ")\n",
    "create_streamlit = Template(\n",
    "    \"\"\"CREATE STREAMLIT IF NOT EXISTS $db.$schema.$app_name\n",
    "    ROOT_LOCATION = '@$db.$schema.$stage'\n",
    "    MAIN_FILE = '$main_file'\n",
    "    QUERY_WAREHOUSE = $wh\n",
    "    COMMENT = '{\"origin\":\"sf_sit\", \"name\":\"ml_sidekick\", \"version\":{\"major\":1, \"minor\":0}, \"attributes\":{\"component\":\"sis_app\"}}'\n",
    "    \"\"\"\n",
    ")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "##### Populate Templates"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "db_query = create_db.substitute(db=metadata.get(\"database_name\"))\n",
    "\n",
    "schema_qry = create_schema.substitute(\n",
    "    db=metadata.get(\"database_name\"), schema=metadata.get(\"schema_name\")\n",
    ")\n",
    "\n",
    "stage_qry = create_stage.substitute(\n",
    "    db=metadata.get(\"database_name\"),\n",
    "    schema=metadata.get(\"schema_name\"),\n",
    "    stage=metadata.get(\"stage_name\"),\n",
    ")\n",
    "\n",
    "app_create_qry = create_streamlit.substitute(\n",
    "    app_name=metadata.get(\"app_name\"),\n",
    "    db=metadata.get(\"database_name\"),\n",
    "    schema=metadata.get(\"schema_name\"),\n",
    "    stage=metadata.get(\"stage_name\"),\n",
    "    main_file=metadata.get(\"main_file\"),\n",
    "    wh=metadata.get(\"query_warehouse\"),\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[Row(status='AUTO_ML already exists, statement succeeded.')]"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "session.sql(db_query).collect()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[Row(status='ST_APPS already exists, statement succeeded.')]"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "session.sql(schema_qry).collect()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[Row(status='APP_STG already exists, statement succeeded.')]"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "session.sql(stage_qry).collect()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Upload project files"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "db = metadata.get(\"database_name\")\n",
    "schema = metadata.get(\"schema_name\")\n",
    "stage = metadata.get(\"stage_name\")\n",
    "for i in upload_metadata.get(\"files\"):\n",
    "    for file in i.get(\"files\"):\n",
    "        path = \"\" if i.get(\"parent\") == \"root\" else i.get(\"parent\")\n",
    "        session.file.put(\n",
    "            local_file_name=file,\n",
    "            stage_location=f\"@{db}.{schema}.{stage}/{path}\",\n",
    "            auto_compress=False,\n",
    "            overwrite=True,\n",
    "        )"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Finally, Create the app"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[Row(status='Streamlit STREAMLIT_AUTO_ML successfully created.')]"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "session.sql(app_create_qry).collect()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "streamlit-automl",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
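
After the notebook above has run, the deployment can be sanity-checked from the same Snowpark session. The following is a minimal sketch, assuming the default values from the notebook's metadata dict (database ML_SIDEKICK, schema ST_APPS, stage APP_STG); it lists the files uploaded to the stage and confirms that the Streamlit object exists.

# Minimal verification sketch; assumes the notebook's default metadata values
# and that `session` is the Snowpark session created above.

# List the project files that were PUT onto the stage.
for row in session.sql("LS @ML_SIDEKICK.ST_APPS.APP_STG").collect():
    print(row["name"])

# Confirm the Streamlit app object was created in the target schema.
for row in session.sql("SHOW STREAMLITS IN SCHEMA ML_SIDEKICK.ST_APPS").collect():
    print(row["name"])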
SiS Deployment/deployment_structure.json

Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
{
  "files": [
    {
      "parent": "root",
      "files": [
        "../streamlit_automl/automl_app.py",
        "../streamlit_automl/callbacks.py",
        "../streamlit_automl/cli.py",
        "../streamlit_automl/code_exporter.py",
        "../streamlit_automl/common.py",
        "../streamlit_automl/histograms.py",
        "../streamlit_automl/ml_modeling.py",
        "../streamlit_automl/ml_ops.py",
        "../streamlit_automl/model_metrics.py",
        "../streamlit_automl/preprocessing.py",
        "../streamlit_automl/utils.py",
        "../streamlit_automl/environment.yml"
      ]
    },
    {
      "parent": "styles",
      "files": [
        "../streamlit_automl/styles/css_bootstrap.html"
      ]
    },
    {
      "parent": "resources",
      "files": [
        "../streamlit_automl/resources/background.png",
        "../streamlit_automl/resources/db.svg",
        "../streamlit_automl/resources/glaciers_back.png",
        "../streamlit_automl/resources/loader.gif",
        "../streamlit_automl/resources/Snowflake_ICON_Alert.png",
        "../streamlit_automl/resources/Snowflake_ICON_Chat.png",
        "../streamlit_automl/resources/Snowflake_ICON_Check.png"
      ]
    }
  ]
}
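
For reference, the notebook's upload loop treats each entry's "parent" value as a sub-folder on the stage, with "root" meaning the stage root, and PUTs every path listed under "files" into that location. The short dry-run sketch below only illustrates that mapping; it assumes this deployment_structure.json sits next to the notebook and uses the notebook's default stage name, and it prints the target stage location for each local file without uploading anything.

# Dry-run sketch of the upload step in deploy_app.ipynb (no files are uploaded).
import json

stage = "@ML_SIDEKICK.ST_APPS.APP_STG"  # assumed default from the notebook's metadata

with open("deployment_structure.json", "r") as config:
    upload_metadata = json.load(config)

for entry in upload_metadata.get("files"):
    # "root" maps to the stage root; any other parent becomes a sub-folder.
    path = "" if entry.get("parent") == "root" else entry.get("parent")
    for file in entry.get("files"):
        print(f"{file} -> {stage}/{path}")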
