@@ -184,212 +184,3 @@ class FILL_IN:
184
184
185
185
classroomCleanup(username, moduleName, lessonName, False)
186
186
187
- # COMMAND ----------
188
-
189
- # MAGIC %scala
190
- # MAGIC //*******************************************
191
- # MAGIC // TAG API FUNCTIONS
192
- # MAGIC //*******************************************
193
- # MAGIC
194
- # MAGIC // Get all tags
195
/** Returns every tag attached to the current Databricks attribution context. */
def getTags(): Map[com.databricks.logging.TagDefinition, String] =
  com.databricks.logging.AttributionContext.current.tags
198
- # MAGIC
199
- # MAGIC // Get a single tag's value
200
/**
 * Looks up a single tag's value by name.
 *
 * @param tagName      name of the tag to look up
 * @param defaultValue value returned when no tag with that name exists
 * @return the first matching tag's value, or defaultValue when none match
 */
def getTag(tagName: String, defaultValue: String = null): String = {
  // collect keeps only entries whose definition name matches tagName
  val values = getTags().collect { case (t, v) if t.name == tagName => v }.toSeq
  // headOption.getOrElse replaces the original manual `size match`; the
  // values are already Strings, so the original's redundant .toString is gone.
  values.headOption.getOrElse(defaultValue)
}
207
- # MAGIC
208
- # MAGIC //*******************************************
209
- # MAGIC // USER, USERNAME, AND USERHOME FUNCTIONS
210
- # MAGIC //*******************************************
211
- # MAGIC
212
- # MAGIC // Get the user's username
213
/**
 * Resolves the current user's name.
 *
 * Prefers the "databricksUsername" widget; when the widget is not defined,
 * falls back to the "user" tag, and finally to a random UUID-derived
 * placeholder so the result is always non-empty.
 */
def getUsername(): String = {
  // `return` dropped: the try-expression's value is the method result.
  try {
    dbutils.widgets.get("databricksUsername")
  } catch {
    // Widget absent in this notebook run — fall back to the tag API.
    case _: Exception => getTag("user", java.util.UUID.randomUUID.toString.replace("-", ""))
  }
}
220
- # MAGIC
221
- # MAGIC // Get the user's userhome
222
/** DBFS home directory for the current user, e.g. "dbfs:/user/&lt;name&gt;". */
def getUserhome(): String = s"dbfs:/user/${getUsername()}"
226
- # MAGIC
227
/**
 * Module name as registered by the Classroom-Setup notebook.
 * Fails by design when "com.databricks.training.module-name" was never set.
 */
def getModuleName(): String =
  spark.conf.get("com.databricks.training.module-name")
231
- # MAGIC
232
/** Lesson name: the last path segment of the current notebook's path. */
def getLessonName(): String =
  dbutils.notebook.getContext.notebookPath.get.split("/").last
236
- # MAGIC
237
/**
 * Per-user, per-lesson scratch directory under the user's DBFS home.
 *
 * Non-alphanumerics in the module/lesson names become underscores, runs of
 * underscores are collapsed to one, and the path is lower-cased so it is a
 * safe, stable DBFS location.
 */
def getWorkingDir(): String = {
  val langType = "s" // for scala
  val lessonName = getLessonName().replaceAll("[^a-zA-Z0-9]", "_")
  val moduleName = getModuleName().replaceAll("[^a-zA-Z0-9]", "_")
  val userhome = getUserhome()
  // One regex collapses ANY run of underscores; the original chained four
  // .replace("__", "_") passes, which misses runs longer than 16 characters.
  // Also switched f"" -> s"": no format specifiers were used.
  s"${userhome}/${moduleName}/${lessonName}/${langType}".replaceAll("_+", "_").toLowerCase()
}
244
- # MAGIC
245
- # MAGIC //**********************************
246
- # MAGIC // USER DATABASE FUNCTIONS
247
- # MAGIC //**********************************
248
- # MAGIC
249
/**
 * Deterministic, user-scoped database name for a lesson.
 *
 * The username is stripped to alphanumerics; the module and lesson names map
 * punctuation to underscores; a "scala" suffix is appended and the whole
 * name is lower-cased.
 */
def getDatabaseName(username: String, moduleName: String, lessonName: String): String = {
  val cleanUser   = username.replaceAll("[^a-zA-Z0-9]", "")
  val cleanModule = moduleName.replaceAll("[^a-zA-Z0-9]", "_")
  val cleanLesson = lessonName.replaceAll("[^a-zA-Z0-9]", "_")
  s"${cleanUser}_${cleanModule}_${cleanLesson}_scala".toLowerCase
}
257
- # MAGIC
258
- # MAGIC // Create a user-specific database
259
/**
 * Creates (if needed) the user-specific database and makes it current.
 *
 * @return the database name that is now the active database
 */
def createUserDatabase(username: String, moduleName: String, lessonName: String): String = {
  val databaseName = getDatabaseName(username, moduleName, lessonName)
  spark.sql(s"CREATE DATABASE IF NOT EXISTS $databaseName")
  spark.sql(s"USE $databaseName")
  databaseName
}
267
- # MAGIC
268
- # MAGIC // ****************************************************************************
269
- # MAGIC // Utility method to determine whether a path exists
270
- # MAGIC // ****************************************************************************
271
- # MAGIC
272
/**
 * True when `path` exists in DBFS, false otherwise.
 *
 * dbutils.fs.ls throws when the path is absent, so existence is probed by
 * listing the path and treating a non-fatal failure as "does not exist".
 */
def pathExists(path: String): Boolean = {
  import scala.util.control.NonFatal
  try {
    dbutils.fs.ls(path)
    true
  } catch {
    // NonFatal (instead of a bare Exception catch) lets interrupts and
    // truly fatal errors propagate rather than being reported as "missing".
    case NonFatal(_) => false
  }
}
280
- # MAGIC
281
- # MAGIC // ****************************************************************************
282
- # MAGIC // Utility method for recursive deletes
283
- # MAGIC // Note: dbutils.fs.rm() does not appear to be truely recursive
284
- # MAGIC // ****************************************************************************
285
- # MAGIC
286
/**
 * Recursively deletes `path`.
 *
 * dbutils.fs.rm does not appear to be truly recursive, so each child is
 * removed individually: a directory whose rm fails is descended into, while
 * an undeletable file raises an IOException. Finally the path itself is
 * removed, failing loudly if that last rm does not succeed.
 */
def deletePath(path: String): Unit = {
  dbutils.fs.ls(path).foreach { file =>
    val removed = dbutils.fs.rm(file.path, true)
    if (!removed) {
      if (file.isDir) deletePath(file.path)
      else throw new java.io.IOException("Unable to delete file: " + file.path)
    }
  }
  if (!dbutils.fs.rm(path, true)) {
    throw new java.io.IOException("Unable to delete directory: " + path)
  }
}
305
- # MAGIC
306
- # MAGIC // ****************************************************************************
307
- # MAGIC // Utility method to clean up the workspace at the end of a lesson
308
- # MAGIC // ****************************************************************************
309
- # MAGIC
310
/**
 * Resets the student environment at the end of a lesson.
 *
 * Stops every active stream, drops each table in the lesson's database
 * (plus any leftover warehouse files), clears the lesson working directory,
 * and — when dropDatabase is true — drops the database itself.
 */
def classroomCleanup(username: String, moduleName: String, lessonName: String, dropDatabase: Boolean): Unit = {

  // Stop any active streams; failures while stopping are deliberately buried.
  spark.streams.active.foreach { stream =>
    try {
      stream.stop()
      stream.awaitTermination()
    } catch {
      case _: Exception => ()
    }
  }

  // Drop the tables only from the specified database.
  val database = getDatabaseName(username, moduleName, lessonName)
  try {
    val tables = spark.sql(s"show tables from $database").select("tableName").collect()
    tables.foreach { row =>
      val tableName = row.getAs[String]("tableName")
      spark.sql("drop table if exists %s.%s".format(database, tableName))

      // In some rare cases the files don't actually get removed.
      Thread.sleep(1000) // Give it just a second...
      val hivePath = "dbfs:/user/hive/warehouse/%s.db/%s".format(database, tableName)
      dbutils.fs.rm(hivePath, true) // Ignoring the delete's success or failure
    }
  } catch {
    case _: Exception => () // ignored
  }

  // Remove files created from previous runs.
  val path = getWorkingDir()
  if (pathExists(path)) {
    deletePath(path)
  }

  // Drop the database if instructed to.
  if (dropDatabase) {
    spark.sql(s"DROP DATABASE IF EXISTS $database CASCADE")

    // In some rare cases the files don't actually get removed.
    Thread.sleep(1000) // Give it just a second...
    val hivePath = "dbfs:/user/hive/warehouse/%s.db".format(database)
    dbutils.fs.rm(hivePath, true) // Ignoring the delete's success or failure

    displayHTML("Dropped database and removed files in working directory")
  }
}
359
- # MAGIC
360
- # MAGIC // ****************************************************************************
361
- # MAGIC // Utility method to delete a database
362
- # MAGIC // ****************************************************************************
363
- # MAGIC
364
/** Drops `database` and everything inside it (CASCADE); no-op when absent. */
def deleteTables(database: String): Unit =
  spark.sql("DROP DATABASE IF EXISTS %s CASCADE".format(database))
367
- # MAGIC
368
- # MAGIC // ****************************************************************************
369
- # MAGIC // Placeholder variables for coding challenge type specification
370
- # MAGIC // ****************************************************************************
371
/**
 * Placeholder values students replace when completing coding challenges.
 * Each member's type hints at the kind of answer expected.
 */
object FILL_IN {
  val VALUE = null
  val ARRAY: Array[Row] = Array(Row())
  val SCHEMA: org.apache.spark.sql.types.StructType = org.apache.spark.sql.types.StructType(List())
  val ROW: Row = Row()
  val LONG: Long = 0
  val INT: Int = 0
  // defs (not vals) so these are re-evaluated on every access
  def DATAFRAME = spark.emptyDataFrame
  def DATASET = spark.createDataset(Seq(""))
}
381
- # MAGIC
382
- # MAGIC //**********************************
383
- # MAGIC // Set up student environment
384
- # MAGIC //**********************************
385
- # MAGIC
386
// Resolve the notebook/lesson identity once so all derived values agree.
val moduleName = getModuleName()
val lessonName = getLessonName()
val username = getUsername()
val userhome = getUserhome()
val workingDir = getWorkingDir()

// Create (and switch to) the per-user database for this lesson.
val databaseName = createUserDatabase(username, moduleName, lessonName)

// Start from a clean slate, but keep the database itself (dropDatabase = false).
classroomCleanup(username, moduleName, lessonName, false)

displayHTML("")
0 commit comments