From a39e04ec657af03c64f76d795fd94f06572a9718 Mon Sep 17 00:00:00 2001
From: "ravi.gawai"
Date: Tue, 19 Sep 2023 15:48:57 -0700
Subject: [PATCH] Adding Databricks Tech Summit Demo: 1. Creates test data for
 100s of tables 2. Creates onboarding files 3. Launches Bronze and Silver DLT
 using DLT-META

---
 dlt-meta-demo/README.md                            |  46 ++-
 .../demo/tech_summit_dlt_meta_runners.dbc          | Bin 0 -> 6235 bytes
 dlt-meta-demo/launch_demo.py                       |   4 +-
 dlt-meta-demo/launch_techsummit_demo.py            | 315 +++++++++++++++++
 tech-summit-demo/README.md                         |  38 +++
 tech-summit-demo/launch_demo.py                    | 320 ++++++++++++++++++
 .../tech_summit_dlt_meta_runners.dbc               | Bin 0 -> 6235 bytes
 7 files changed, 719 insertions(+), 4 deletions(-)
 create mode 100644 dlt-meta-demo/demo/tech_summit_dlt_meta_runners.dbc
 create mode 100644 dlt-meta-demo/launch_techsummit_demo.py
 create mode 100644 tech-summit-demo/README.md
 create mode 100644 tech-summit-demo/launch_demo.py
 create mode 100644 tech-summit-demo/tech_summit_dlt_meta_runners.dbc

diff --git a/dlt-meta-demo/README.md b/dlt-meta-demo/README.md
index 6da8be2..9dcd142 100644
--- a/dlt-meta-demo/README.md
+++ b/dlt-meta-demo/README.md
@@ -1,5 +1,7 @@
- # DAIS 2023 [DLT-META](https://github.com/databrickslabs/dlt-meta) DEMO
+ # [DLT-META](https://github.com/databrickslabs/dlt-meta) DEMOs
+## DAIS 2023 DEMO
+This demo launches DLT-META with 100s of tables in bronze and silver DLT pipelines. It creates the test tables dynamically and then launches the bronze and silver DLT pipelines.
 
 1. Launch Terminal/Command promt
 
 2. Install [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html)
@@ -31,4 +33,44 @@ export DATABRICKS_TOKEN= # Paste Token here from Step#6, Accou
 6. Run the command ```python launch_demo.py --cloud_provider_name=aws --dbr_version=12.2.x-scala2.12 --dbfs_path=dbfs:/dais-dlt-meta-demo-automated/```
    - cloud_provider_name : aws or azure or gcp
    - db_version : Databricks Runtime Version
-   - dbfs_path : Path on your Databricks workspace where demo will be copied for launching DLT-META Pipelines
\ No newline at end of file
+   - dbfs_path : Path on your Databricks workspace where demo will be copied for launching DLT-META Pipelines
+
+## Databricks Tech Summit FY2024 DEMO
+This demo launches auto-generated tables inside a single bronze and a single silver DLT pipeline using DLT-META.
+
+1. Launch Terminal/Command prompt
+
+2. Install [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html)
+
+3. ```git clone https://github.com/databrickslabs/dlt-meta.git```
+
+4. ```cd dlt-meta/dlt-meta-demo```
+
+5. Get DATABRICKS_HOST:
+    - Enter your workspace URL, with the format https://.cloud.databricks.com. To get your workspace URL, see Workspace instance names, URLs, and IDs.
+
+6. Generate DATABRICKS_TOKEN:
+    - In your Databricks workspace, click your Databricks username in the top bar, and then select User Settings from the drop-down.
+
+    - On the Access tokens tab, click Generate new token.
+
+    - (Optional) Enter a comment that helps you to identify this token in the future, and change the token’s default lifetime of 90 days. To create a token with no lifetime (not recommended), leave the Lifetime (days) box empty (blank).
+
+    - Click Generate.
+
+    - Copy the displayed token.
+
+7. Set the environment variables in your terminal:
+```
+export DATABRICKS_HOST= # Paste from Step#5
+export DATABRICKS_TOKEN= # Paste Token here from Step#6, Account needs permission to create clusters/DLT pipelines.
+```
+
+8. 
Run the command ```python launch_techsummit_demo.py --cloud_provider_name=aws --dbr_version=12.2.x-scala2.12 --dbfs_path=dbfs:/tech-summit-dlt-meta-demo-automated/```
+    - cloud_provider_name : aws or azure or gcp
+    - dbr_version : Databricks Runtime Version
+    - dbfs_path : Path on your Databricks workspace where demo will be copied for launching DLT-META Pipelines
+    - worker_nodes : (Optional) Provide number of worker nodes for data generation e.g. 4
+    - table_count : (Optional) Provide table count e.g. 100
+    - table_column_count : (Optional) Provide column count e.g. 5
+    - table_data_rows_count : (Optional) Provide data rows count e.g. 10
\ No newline at end of file
diff --git a/dlt-meta-demo/demo/tech_summit_dlt_meta_runners.dbc b/dlt-meta-demo/demo/tech_summit_dlt_meta_runners.dbc
new file mode 100644
index 0000000000000000000000000000000000000000..1daca2c689e260f615812c34e1c55d21ad76e913
GIT binary patch
literal 6235
[base85-encoded binary payload omitted: Databricks notebook archive (.dbc), 6235 bytes]

literal 0
HcmV?d00001

diff --git a/dlt-meta-demo/launch_demo.py b/dlt-meta-demo/launch_demo.py
index 009a2c5..0559f11 100644
--- a/dlt-meta-demo/launch_demo.py
+++ b/dlt-meta-demo/launch_demo.py
@@ -3,12 +3,12 @@
 import time
 import uuid
 import argparse
+import base64
+import json
 from databricks_cli.dbfs.api import DbfsApi
 from databricks_cli.configure.config import _get_api_client
 from databricks_cli.configure.provider import EnvironmentVariableConfigProvider
 from databricks_cli.sdk import JobsService, DbfsService, DeltaPipelinesService, WorkspaceService
-import base64
-import json
 
 
 def get_api_client():
diff --git a/dlt-meta-demo/launch_techsummit_demo.py b/dlt-meta-demo/launch_techsummit_demo.py
new file mode 100644
index 0000000..2ce9767
--- /dev/null
+++ b/dlt-meta-demo/launch_techsummit_demo.py
@@ -0,0 +1,315 @@
+"""Databricks Tech Summit demo launch script."""
+import os
+import time
+import uuid
+import argparse
+import base64
+from databricks_cli.configure.config import _get_api_client
+from databricks_cli.configure.provider import EnvironmentVariableConfigProvider
+from databricks_cli.sdk import JobsService, DbfsService, DeltaPipelinesService, WorkspaceService
+
+
+def get_api_client():
+    """Get api client with config."""
+    config = EnvironmentVariableConfigProvider().get_config()
+    api_client = _get_api_client(config, command_name="labs_dlt-meta")
+    return api_client
+
+
+cloud_node_type_id_dict = {"aws": "i3.xlarge", "azure": "Standard_D3_v2", "gcp": "n1-highmem-4"}
+
+
+def create_workflow_spec(job_spec_dict):
+    """Create Job specification."""
+    job_spec = {
+        "run_name": f"techsummit-dlt-meta-demo-{job_spec_dict['run_id']}",
+        "tasks": [
+
+            {
+                "task_key": "generate_data",
+                "description": "Generate Test Data and Onboarding Files",
+                "new_cluster": {
+                    "spark_version": 
job_spec_dict['dbr_version'], + "num_workers": job_spec_dict['worker_nodes'], + "node_type_id": job_spec_dict['node_type_id'], + "data_security_mode": "LEGACY_SINGLE_USER", + "runtime_engine": "STANDARD" + }, + "notebook_task": { + "notebook_path": f"{job_spec_dict['runners_nb_path']}/runners/data_generator", + "base_parameters": { + "base_input_path": job_spec_dict['dbfs_tmp_path'], + "table_column_count": job_spec_dict['table_column_count'], + "table_count":job_spec_dict['table_count'], + "table_data_rows_count":job_spec_dict['table_data_rows_count'] + } + } + + }, + { + "task_key": "onboarding_job", + "depends_on": [ + { + "task_key": "generate_data" + } + ], + "description": "Sets up metadata tables for DLT-META", + "new_cluster": { + "spark_version": job_spec_dict['dbr_version'], + "num_workers": 0, + "node_type_id": job_spec_dict['node_type_id'], + "data_security_mode": "LEGACY_SINGLE_USER", + "runtime_engine": "STANDARD", + "spark_conf": { + "spark.master": "local[*, 4]", + "spark.databricks.cluster.profile": "singleNode", + }, + "custom_tags": { + "ResourceClass": "SingleNode", + } + }, + "python_wheel_task": { + "package_name": "dlt_meta", + "entry_point": "run", + "named_parameters": { + "onboard_layer": "bronze_silver", + "database": job_spec_dict['database'], + "onboarding_file_path": f"{job_spec_dict['dbfs_tmp_path']}/conf/onboarding.json", + "silver_dataflowspec_table": "silver_dataflowspec_cdc", + "silver_dataflowspec_path": f"{job_spec_dict['dbfs_tmp_path']}/data/dlt_spec/silver", + "bronze_dataflowspec_table": "bronze_dataflowspec_cdc", + "import_author": "Ravi", + "version": "v1", + "bronze_dataflowspec_path": f"{job_spec_dict['dbfs_tmp_path']}/data/dlt_spec/bronze", + "overwrite": "True", + "env": job_spec_dict['env'] + }, + }, + "libraries": [ + { + "pypi": { + "package": "dlt-meta" + } + } + ] + }, + { + "task_key": "bronze_dlt", + "depends_on": [ + { + "task_key": "onboarding_job" + } + ], + "pipeline_task": { + "pipeline_id": job_spec_dict['bronze_pipeline_id'] + } + }, + { + "task_key": "silver_dlt", + "depends_on": [ + { + "task_key": "bronze_dlt" + } + ], + "pipeline_task": { + "pipeline_id": job_spec_dict['silver_pipeline_id'] + } + } + ] + } + print(job_spec) + return job_spec + + +def create_dlt_meta_pipeline( + pipeline_service: DeltaPipelinesService, + runners_nb_path, + run_id, configuration={}): + """Create DLT pipeline.""" + return pipeline_service.create( + name=f"dais-dlt-meta-{configuration['layer']}-{run_id}", + clusters=[ + { + "label": "default", + "num_workers": 4 + } + ], + configuration=configuration, + libraries=[ + { + "notebook": { + "path": f"{runners_nb_path}/runners/init_dlt_meta_pipeline" + } + } + ], + target=f"{configuration['layer']}_{run_id}" + )['pipeline_id'] + + +class JobSubmitRunner(): + """Job Runner class.""" + + def __init__(self, job_client: JobsService, job_dict): + """Init method.""" + self.job_dict = job_dict + self.job_client = job_client + + def submit(self): + """Submit job.""" + return self.job_client.submit_run(**self.job_dict) + + def monitor(self, run_id): + """Monitor job using runId.""" + while True: + self.run_res = self.job_client.get_run(run_id) + self.run_url = self.run_res["run_page_url"] + self.run_life_cycle_state = self.run_res['state']['life_cycle_state'] + self.run_result_state = self.run_res['state'].get('result_state') + self.run_state_message = self.run_res['state'].get('state_message') + + if self.run_life_cycle_state in ['PENDING', 'RUNNING', 'TERMINATING']: + print("Job still running current life 
Cycle State is " + self.run_life_cycle_state) + elif self.run_life_cycle_state in ['TERMINATED']: + print("Job terminated") + + if self.run_result_state in ['SUCCESS']: + print("Job Succeeded") + print(f"Run URL {self.run_url}") + break + else: + print("Job failed with the state of " + self.run_result_state) + print(self.run_state_message) + break + else: + print( + "Job was either Skipped or had Internal error please check the jobs ui") + print(self.run_state_message) + break + + time.sleep(20) + + +def main(): + """Entry method to run integration tests.""" + args = process_arguments() + + api_client = get_api_client() + username = api_client.perform_query("GET", "/preview/scim/v2/Me").get("userName") + run_id = uuid.uuid4().hex + dbfs_tmp_path = f"{args.__dict__['dbfs_path']}/{run_id}" + database = f"dais_dlt_meta_{run_id}" + runners_nb_path = f"/Users/{username}/techsummit_dlt_meta/{run_id}" + runners_full_local_path = 'demo/tech_summit_dlt_meta_runners.dbc' + + dbfs_service = DbfsService(api_client) + jobs_service = JobsService(api_client) + workspace_service = WorkspaceService(api_client) + pipeline_service = DeltaPipelinesService(api_client) + + try: + fp = open(runners_full_local_path, "rb") + workspace_service.mkdirs(path=runners_nb_path) + workspace_service.import_workspace(path=f"{runners_nb_path}/runners", format="DBC", + content=base64.encodebytes(fp.read()).decode('utf-8')) + bronze_pipeline_id = create_dlt_meta_pipeline( + pipeline_service, runners_nb_path, run_id, configuration={ + "layer": "bronze", + "bronze.group": "A1", + "bronze.dataflowspecTable": f"{database}.bronze_dataflowspec_cdc" + } + ) + + cloud_node_type_id_dict = {"aws": "i3.xlarge", + "azure": "Standard_D3_v2", + "gcp": "n1-highmem-4" + } + job_spec_dict = {"run_id": run_id, + "dbfs_tmp_path": dbfs_tmp_path, + "runners_nb_path": runners_nb_path, + "database": database, + "env": "prod", + "bronze_pipeline_id": bronze_pipeline_id, + "node_type_id": cloud_node_type_id_dict[args.__dict__['cloud_provider_name']], + "dbr_version": args.__dict__['dbr_version'] + } + job_spec_dict = get_datagenerator_details(args, job_spec_dict) + silver_pipeline_id = create_dlt_meta_pipeline( + pipeline_service, runners_nb_path, run_id, configuration={ + "layer": "silver", + "silver.group": "A1", + "silver.dataflowspecTable": f"{database}.silver_dataflowspec_cdc" + } + ) + job_spec_dict["silver_pipeline_id"] = silver_pipeline_id + job_spec = create_workflow_spec(job_spec_dict) + job_submit_runner = JobSubmitRunner(jobs_service, job_spec) + job_run_info = job_submit_runner.submit() + print(f"Run URL {job_run_info['run_id']}") + job_submit_runner.monitor(job_run_info['run_id']) + except Exception as e: + print(e) + finally: + pipeline_service.delete(bronze_pipeline_id) + pipeline_service.delete(silver_pipeline_id) + dbfs_service.delete(dbfs_tmp_path, True) + workspace_service.delete(runners_nb_path, True) + + +def get_datagenerator_details(args, job_spec_dict): + if args.__dict__['table_count']: + job_spec_dict['table_count'] = args.__dict__['table_count'] + else: + job_spec_dict['table_count'] = "100" + if args.__dict__['table_column_count']: + job_spec_dict['table_column_count'] = args.__dict__['table_column_count'] + else: + job_spec_dict['table_column_count'] = "5" + if args.__dict__['table_data_rows_count']: + job_spec_dict['table_data_rows_count'] = args.__dict__['table_data_rows_count'] + else: + job_spec_dict['table_data_rows_count'] = "10" + if args.__dict__['worker_nodes']: + job_spec_dict['worker_nodes'] = 
args.__dict__['worker_nodes']
+    else:
+        job_spec_dict['worker_nodes'] = "4"
+    return job_spec_dict
+
+
+def process_arguments():
+    """Process command line arguments."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--cloud_provider_name",
+                        help="Provide cloud provider name. Supported values are aws, azure, gcp")
+    parser.add_argument("--dbr_version", help="Provide Databricks runtime spark version e.g. 11.3.x-scala2.12")
+    parser.add_argument("--dlt_workers", help="Provide worker node count e.g. 4")
+    parser.add_argument("--dbfs_path",
+                        help="Provide databricks workspace dbfs path where you want to run the demo \
+                        e.g. --dbfs_path=dbfs:/tmp/DLT-META/")
+    parser.add_argument("--worker_nodes", help="Provide number of worker nodes for data generation e.g. 4")
+    parser.add_argument("--table_count", help="Provide table count e.g. 100")
+    parser.add_argument("--table_column_count", help="Provide column count e.g. 5")
+    parser.add_argument("--table_data_rows_count", help="Provide data rows count e.g. 10")
+
+    args = parser.parse_args()
+    mandatory_args = ["cloud_provider_name", "dbr_version", "dbfs_path"]
+    check_mandatory_arg(args, mandatory_args)
+
+    supported_cloud_providers = ["aws", "azure", "gcp"]
+
+    cloud_provider_name = args.__getattribute__("cloud_provider_name")
+    if cloud_provider_name.lower() not in supported_cloud_providers:
+        raise Exception("Invalid value for --cloud_provider_name! Supported values are aws, azure, gcp")
+
+    print(f"Parsing argument complete. args={args}")
+    return args
+
+
+def check_mandatory_arg(args, mandatory_args):
+    """Check mandatory argument present."""
+    for mand_arg in mandatory_args:
+        if args.__dict__[f'{mand_arg}'] is None:
+            raise Exception(f"Please provide '--{mand_arg}'")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tech-summit-demo/README.md b/tech-summit-demo/README.md
new file mode 100644
index 0000000..f24ed5b
--- /dev/null
+++ b/tech-summit-demo/README.md
@@ -0,0 +1,38 @@
+ # Databricks Tech Summit FY 2024 [DLT-META](https://github.com/databrickslabs/dlt-meta) DEMO
+
+1. Launch Terminal/Command prompt
+
+2. Install [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html)
+
+3. ```git clone https://github.com/databrickslabs/dlt-meta.git```
+
+4. ```cd dlt-meta/tech-summit-demo```
+
+5. Get DATABRICKS_HOST:
+    - Enter your workspace URL, with the format https://.cloud.databricks.com. To get your workspace URL, see Workspace instance names, URLs, and IDs.
+
+6. Generate DATABRICKS_TOKEN:
+    - In your Databricks workspace, click your Databricks username in the top bar, and then select User Settings from the drop-down.
+
+    - On the Access tokens tab, click Generate new token.
+
+    - (Optional) Enter a comment that helps you to identify this token in the future, and change the token’s default lifetime of 90 days. To create a token with no lifetime (not recommended), leave the Lifetime (days) box empty (blank).
+
+    - Click Generate.
+
+    - Copy the displayed token.
+
+7. Set the environment variables in your terminal:
+```
+export DATABRICKS_HOST= # Paste from Step#5
+export DATABRICKS_TOKEN= # Paste Token here from Step#6, Account needs permission to create clusters/DLT pipelines.
+```
+
+8. 
Run the command ```python launch_demo.py --cloud_provider_name=aws --dbr_version=12.2.x-scala2.12 --dbfs_path=dbfs:/tech-summit-dlt-meta-demo-automated/```
+    - cloud_provider_name : aws or azure or gcp
+    - dbr_version : Databricks Runtime Version
+    - dbfs_path : Path on your Databricks workspace where demo will be copied for launching DLT-META Pipelines
+    - worker_nodes : (Optional) Provide number of worker nodes for data generation e.g. 4
+    - table_count : (Optional) Provide table count e.g. 100
+    - table_column_count : (Optional) Provide column count e.g. 5
+    - table_data_rows_count : (Optional) Provide data rows count e.g. 10
\ No newline at end of file
diff --git a/tech-summit-demo/launch_demo.py b/tech-summit-demo/launch_demo.py
new file mode 100644
index 0000000..4158a7e
--- /dev/null
+++ b/tech-summit-demo/launch_demo.py
@@ -0,0 +1,320 @@
+"""Databricks Tech Summit demo launch script."""
+import os
+import time
+import uuid
+import argparse
+import base64
+from databricks_cli.configure.config import _get_api_client
+from databricks_cli.configure.provider import EnvironmentVariableConfigProvider
+from databricks_cli.sdk import JobsService, DbfsService, DeltaPipelinesService, WorkspaceService
+
+
+def get_api_client():
+    """Get api client with config."""
+    config = EnvironmentVariableConfigProvider().get_config()
+    api_client = _get_api_client(config, command_name="labs_dlt-meta")
+    return api_client
+
+
+cloud_node_type_id_dict = {"aws": "i3.xlarge", "azure": "Standard_D3_v2", "gcp": "n1-highmem-4"}
+
+
+def create_workflow_spec(job_spec_dict):
+    """Create Job specification."""
+    job_spec = {
+        "run_name": f"techsummit-dlt-meta-demo-{job_spec_dict['run_id']}",
+        "tasks": [
+
+            {
+                "task_key": "generate_data",
+                "description": "Generate Test Data and Onboarding Files",
+                "new_cluster": {
+                    "spark_version": job_spec_dict['dbr_version'],
+                    "num_workers": job_spec_dict['worker_nodes'],
+                    "node_type_id": job_spec_dict['node_type_id'],
+                    "data_security_mode": "LEGACY_SINGLE_USER",
+                    "runtime_engine": "STANDARD"
+                },
+                "notebook_task": {
+                    "notebook_path": f"{job_spec_dict['runners_nb_path']}/runners/data_generator",
+                    "base_parameters": {
+                        "base_input_path": job_spec_dict['dbfs_tmp_path'],
+                        "table_column_count": job_spec_dict['table_column_count'],
+                        "table_count": job_spec_dict['table_count'],
+                        "table_data_rows_count": job_spec_dict['table_data_rows_count']
+                    }
+                }
+
+            },
+            {
+                "task_key": "onboarding_job",
+                "depends_on": [
+                    {
+                        "task_key": "generate_data"
+                    }
+                ],
+                "description": "Sets up metadata tables for DLT-META",
+                "new_cluster": {
+                    "spark_version": job_spec_dict['dbr_version'],
+                    "num_workers": 0,
+                    "node_type_id": job_spec_dict['node_type_id'],
+                    "data_security_mode": "LEGACY_SINGLE_USER",
+                    "runtime_engine": "STANDARD",
+                    "spark_conf": {
+                        "spark.master": "local[*, 4]",
+                        "spark.databricks.cluster.profile": "singleNode",
+                    },
+                    "custom_tags": {
+                        "ResourceClass": "SingleNode",
+                    }
+                },
+                "python_wheel_task": {
+                    "package_name": "dlt_meta",
+                    "entry_point": "run",
+                    "named_parameters": {
+                        "onboard_layer": "bronze_silver",
+                        "database": job_spec_dict['database'],
+                        "onboarding_file_path": f"{job_spec_dict['dbfs_tmp_path']}/conf/onboarding.json",
+                        "silver_dataflowspec_table": "silver_dataflowspec_cdc",
+                        "silver_dataflowspec_path": f"{job_spec_dict['dbfs_tmp_path']}/data/dlt_spec/silver",
+                        "bronze_dataflowspec_table": "bronze_dataflowspec_cdc",
+                        "import_author": "Ravi",
+                        "version": "v1",
+                        "bronze_dataflowspec_path": f"{job_spec_dict['dbfs_tmp_path']}/data/dlt_spec/bronze",
+                        "overwrite": 
"True", + "env": job_spec_dict['env'] + }, + }, + "libraries": [ + { + "pypi": { + "package": "dlt-meta" + } + } + ] + }, + { + "task_key": "bronze_dlt", + "depends_on": [ + { + "task_key": "onboarding_job" + } + ], + "pipeline_task": { + "pipeline_id": job_spec_dict['bronze_pipeline_id'] + } + }, + { + "task_key": "silver_dlt", + "depends_on": [ + { + "task_key": "bronze_dlt" + } + ], + "pipeline_task": { + "pipeline_id": job_spec_dict['silver_pipeline_id'] + } + } + ] + } + print(job_spec) + return job_spec + + +def create_dlt_meta_pipeline( + pipeline_service: DeltaPipelinesService, + runners_nb_path, + run_id, configuration={}): + """Create DLT pipeline.""" + return pipeline_service.create( + name=f"dais-dlt-meta-{configuration['layer']}-{run_id}", + clusters=[ + { + "label": "default", + "num_workers": 4 + } + ], + configuration=configuration, + libraries=[ + { + "notebook": { + "path": f"{runners_nb_path}/runners/init_dlt_meta_pipeline" + } + } + ], + target=f"{configuration['layer']}_{run_id}" + )['pipeline_id'] + + +class JobSubmitRunner(): + """Job Runner class.""" + + def __init__(self, job_client: JobsService, job_dict): + """Init method.""" + self.job_dict = job_dict + self.job_client = job_client + + def submit(self): + """Submit job.""" + return self.job_client.submit_run(**self.job_dict) + + def monitor(self, run_id): + """Monitor job using runId.""" + while True: + self.run_res = self.job_client.get_run(run_id) + self.run_url = self.run_res["run_page_url"] + self.run_life_cycle_state = self.run_res['state']['life_cycle_state'] + self.run_result_state = self.run_res['state'].get('result_state') + self.run_state_message = self.run_res['state'].get('state_message') + + if self.run_life_cycle_state in ['PENDING', 'RUNNING', 'TERMINATING']: + print("Job still running current life Cycle State is " + self.run_life_cycle_state) + elif self.run_life_cycle_state in ['TERMINATED']: + print("Job terminated") + + if self.run_result_state in ['SUCCESS']: + print("Job Succeeded") + print(f"Run URL {self.run_url}") + break + else: + print("Job failed with the state of " + self.run_result_state) + print(self.run_state_message) + break + else: + print( + "Job was either Skipped or had Internal error please check the jobs ui") + print(self.run_state_message) + break + + time.sleep(20) + + +def main(): + """Entry method to run integration tests.""" + args = process_arguments() + + api_client = get_api_client() + username = api_client.perform_query("GET", "/preview/scim/v2/Me").get("userName") + run_id = uuid.uuid4().hex + dbfs_tmp_path = f"{args.__dict__['dbfs_path']}/{run_id}" + database = f"dais_dlt_meta_{run_id}" + # int_tests = "demo/" + runners_nb_path = f"/Users/{username}/techsummit_dlt_meta/{run_id}" + runners_full_local_path = 'tech_summit_dlt_meta_runners.dbc' + + dbfs_service = DbfsService(api_client) + jobs_service = JobsService(api_client) + workspace_service = WorkspaceService(api_client) + pipeline_service = DeltaPipelinesService(api_client) + + try: + fp = open(runners_full_local_path, "rb") + workspace_service.mkdirs(path=runners_nb_path) + workspace_service.import_workspace(path=f"{runners_nb_path}/runners", format="DBC", + content=base64.encodebytes(fp.read()).decode('utf-8')) + bronze_pipeline_id = create_dlt_meta_pipeline( + pipeline_service, runners_nb_path, run_id, configuration={ + "layer": "bronze", + "bronze.group": "A1", + "bronze.dataflowspecTable": f"{database}.bronze_dataflowspec_cdc" + } + ) + + cloud_node_type_id_dict = {"aws": "i3.xlarge", + "azure": 
"Standard_D3_v2", + "gcp": "n1-highmem-4" + } + job_spec_dict = {"run_id": run_id, + "dbfs_tmp_path": dbfs_tmp_path, + "runners_nb_path": runners_nb_path, + "database": database, + "env": "prod", + "bronze_pipeline_id": bronze_pipeline_id, + "node_type_id": cloud_node_type_id_dict[args.__dict__['cloud_provider_name']], + "dbr_version": args.__dict__['dbr_version'] + } + job_spec_dict = get_datagenerator_details(args, job_spec_dict) + silver_pipeline_id = create_dlt_meta_pipeline( + pipeline_service, runners_nb_path, run_id, configuration={ + "layer": "silver", + "silver.group": "A1", + "silver.dataflowspecTable": f"{database}.silver_dataflowspec_cdc" + } + ) + job_spec_dict["silver_pipeline_id"] = silver_pipeline_id + job_spec = create_workflow_spec(job_spec_dict) + job_submit_runner = JobSubmitRunner(jobs_service, job_spec) + job_run_info = job_submit_runner.submit() + print(f"Run URL {job_run_info['run_id']}") + job_submit_runner.monitor(job_run_info['run_id']) + except Exception as e: + print(e) + finally: + pipeline_service.delete(bronze_pipeline_id) + pipeline_service.delete(silver_pipeline_id) + dbfs_service.delete(dbfs_tmp_path, True) + workspace_service.delete(runners_nb_path, True) + try: + os.remove("conf/onboarding.json") + except Exception as e: + print(e) + + +def get_datagenerator_details(args, job_spec_dict): + if args.__dict__['table_count']: + job_spec_dict['table_count'] = args.__dict__['table_count'] + else: + job_spec_dict['table_count'] = "100" + if args.__dict__['table_column_count']: + job_spec_dict['table_column_count'] = args.__dict__['table_column_count'] + else: + job_spec_dict['table_column_count'] = "5" + if args.__dict__['table_data_rows_count']: + job_spec_dict['table_data_rows_count'] = args.__dict__['table_data_rows_count'] + else: + job_spec_dict['table_data_rows_count'] = "10" + if args.__dict__['worker_nodes']: + job_spec_dict['worker_nodes'] = args.__dict__['worker_nodes'] + else: + job_spec_dict['worker_nodes'] = "4" + return job_spec_dict + + +def process_arguments(): + """Process command line arguments.""" + parser = argparse.ArgumentParser() + parser.add_argument("--cloud_provider_name", + help="provide cloud provider name. Supported values are aws , azure , gcp") + parser.add_argument("--dbr_version", help="Provide databricks runtime spark version e.g 11.3.x-scala2.12") + parser.add_argument("--dlt_workers", help="Provide Worker node count e.g 4") + parser.add_argument("--dbfs_path", + help="Provide databricks workspace dbfs path where you want run integration tests \ + e.g --dbfs_path=dbfs:/tmp/DLT-META/") + parser.add_argument("--worker_nodes", help="Provide number of worker nodes for data generation e.g 4") + parser.add_argument("--table_count", help="Provide table count e.g 100") + parser.add_argument("--table_column_count", help="Provide column count e.g 5") + parser.add_argument("--table_data_rows_count", help="Provide data rows count e.g 10") + + args = parser.parse_args() + mandatory_args = ["cloud_provider_name", "dbr_version", "dbfs_path"] + check_mandatory_arg(args, mandatory_args) + + supported_cloud_providers = ["aws", "azure", "gcp"] + + cloud_provider_name = args.__getattribute__("cloud_provider_name") + if cloud_provider_name.lower() not in supported_cloud_providers: + raise Exception("Invalid value for --cloud_provider_name! Supported values are aws, azure, gcp") + + print(f"Parsing argument complete. 
args={args}")
+    return args
+
+
+def check_mandatory_arg(args, mandatory_args):
+    """Check mandatory argument present."""
+    for mand_arg in mandatory_args:
+        if args.__dict__[f'{mand_arg}'] is None:
+            raise Exception(f"Please provide '--{mand_arg}'")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tech-summit-demo/tech_summit_dlt_meta_runners.dbc b/tech-summit-demo/tech_summit_dlt_meta_runners.dbc
new file mode 100644
index 0000000000000000000000000000000000000000..1daca2c689e260f615812c34e1c55d21ad76e913
GIT binary patch
literal 6235
[base85-encoded binary payload omitted: Databricks notebook archive (.dbc), 6235 bytes]

literal 0
HcmV?d00001
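

For quick reference, the optional data-generation flags documented in the READMEs above can be combined with the required arguments in a single invocation. The command below is only an illustrative sketch: the flag names come from process_arguments() in the launcher scripts, the values shown mirror the defaults applied by get_datagenerator_details(), and the DBFS path is a placeholder. From dlt-meta-demo/ the script is launch_techsummit_demo.py; from tech-summit-demo/ use launch_demo.py instead.
```
python launch_techsummit_demo.py \
  --cloud_provider_name=aws \
  --dbr_version=12.2.x-scala2.12 \
  --dbfs_path=dbfs:/tech-summit-dlt-meta-demo-automated/ \
  --worker_nodes=4 \
  --table_count=100 \
  --table_column_count=5 \
  --table_data_rows_count=10
```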