diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 7e3f096..3059124 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,5 +1,5 @@ { - "image": "mcr.microsoft.com/devcontainers/dotnet:7.0", + "image": "mcr.microsoft.com/devcontainers/dotnet:7.0-bookworm", "features": { "ghcr.io/devcontainers-contrib/features/npm-package:1": { "package": "typescript", @@ -25,9 +25,11 @@ "ms-azuretools.vscode-azurefunctions", "ms-vscode.azure-account", "ms-azuretools.vscode-azureresourcegroups", "humao.rest-client", "github.vscode-github-actions", - "Vue.volar" + "Vue.volar", + "ms-dotnettools.csdevkit", + "ms-dotnettools.csharp" ] } } diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..8efbe82 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,13 @@ +# Thanks to: https://rehansaeed.com/gitattributes-best-practices/ + +# Set default behavior to automatically normalize line endings. +* text=auto + +# Force batch scripts to always use CRLF line endings so that if a repo is accessed +# in Windows via a file share from Linux, the scripts will work. +*.{cmd,[cC][mM][dD]} text eol=crlf +*.{bat,[bB][aA][tT]} text eol=crlf + +# Force bash scripts to always use LF line endings so that if a repo is accessed +# in Unix via a file share from Windows, the scripts will work. +*.sh text eol=lf \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3014ae9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,401 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. 
+## +## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.log +*.tlog +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code 
Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. 
+!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
+*.vbw + +# Visual Studio 6 auto-generated project file (contains which files were open etc.) +*.vbp + +# Visual Studio 6 workspace and project file (working project files containing files to include in project) +*.dsw +*.dsp + +# Visual Studio 6 technical files +*.ncb +*.aps + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# Visual Studio History (VSHistory) files +.vshistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +# VS Code files for those working on multiple tools +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +# Windows Installer files from build outputs +*.cab +*.msi +*.msix +*.msm +*.msp + +# JetBrains Rider +*.sln.iml + +# Environments +*.env \ No newline at 
end of file diff --git a/app/index.html b/app/index.html index 94c4bfe..67f7762 100644 --- a/app/index.html +++ b/app/index.html @@ -28,7 +28,7 @@ // Get the data from a stored procedure function getPref(ppref) { - fetch('/data-api/rest/getPersonByPet?pet=' + ppref) + fetch('/data-api/rest/GetPersonByPet?pet=' + ppref) .then(res => { if (res.status== '403') { alert('403 - forbidden, try logging in') diff --git a/client/src/components/ToDoList.vue b/client/src/components/ToDoList.vue index ec52952..8e87d73 100644 --- a/client/src/components/ToDoList.vue +++ b/client/src/components/ToDoList.vue @@ -138,7 +138,7 @@ export default { evt.preventDefault(); }, - dragDrop: function(evt, destTodo) { + dragDrop: async function(evt, destTodo) { evt.target.classList.remove("drag"); const sourceId = evt.dataTransfer.getData('itemID') const sourceTodo = this.todos.find((t => t.id == sourceId)) @@ -151,13 +151,13 @@ export default { const userId = this.userId ?? "public"; - fetch(API + `/id/${destTodo.id}`, { + await fetch(API + `/id/${destTodo.id}`, { headers: HEADERS, method: "PATCH", body: JSON.stringify({ order: destTodo.order, owner_id: userId }) }); - fetch(API + `/id/${sourceTodo.id}`, { + await fetch(API + `/id/${sourceTodo.id}`, { headers: HEADERS, method: "PATCH", body: JSON.stringify({ order: sourceTodo.order, owner_id: userId }) diff --git a/docs/2-Database-tasks.md b/docs/2-Database-tasks.md index 34daca6..def77fd 100644 --- a/docs/2-Database-tasks.md +++ b/docs/2-Database-tasks.md @@ -169,9 +169,9 @@ The SQL Database Projects extension is an Azure Data Studio and Visual Studio Co ```SQL CREATE TABLE [dbo].[address] ( - [address_id] INT IDENTITY (1, 1) NOT NULL PRIMARY KEY CLUSTERED ([address_id] ASC), - [person_id] INT NOT NULL, - [address] NVARCHAR (200) NOT NULL, + [address_id] INT IDENTITY (1, 1) NOT NULL PRIMARY KEY CLUSTERED ([address_id] ASC), + [person_id] INT NOT NULL, + [address] NVARCHAR (200) NOT NULL, CONSTRAINT [FK_address_person] FOREIGN KEY 
([person_id]) REFERENCES [dbo].[person] ([person_id]) ); ``` @@ -373,11 +373,22 @@ The SQL Database Projects extension is an Azure Data Studio and Visual Studio Co ```SQL -- This file contains SQL statements that will be executed after the build script. - insert into dbo.person(person_name, person_email, pet_preference) values('Bill','bill@contoso.com','Dogs'); - insert into dbo.person(person_name, person_email, pet_preference) values('Frank', 'frank@contoso.com','Cats'); - insert into dbo.person(person_name, person_email, pet_preference) values('Riley', 'Riley@contoso.com','Cats'); - insert into address (person_id, address) values (1, 'Lincoln, MA'); - insert into address (person_id, address) values (2, 'Baltimore, MD'); + + set identity_insert dbo.person on + + insert into dbo.person(person_id, person_name, person_email, pet_preference) values(1,'Bill','bill@contoso.com','Dogs'); + insert into dbo.person(person_id, person_name, person_email, pet_preference) values(2,'Frank', 'frank@contoso.com','Cats'); + insert into dbo.person(person_id, person_name, person_email, pet_preference) values(3,'Riley', 'Riley@contoso.com','Cats'); + + set identity_insert dbo.person off + + set identity_insert dbo.address on + + insert into dbo.address (address_id, person_id, address) values (1, 1, 'Lincoln, MA'); + insert into dbo.address (address_id, person_id, address) values (2, 2, 'Baltimore, MD'); + + set identity_insert dbo.address off + insert into dbo.todo ( [id], @@ -394,6 +405,7 @@ The SQL Database Projects extension is an Azure Data Studio and Visual Studio Co ('00000000-0000-0000-0000-000000000005', N'Add support for sorting', 1, 'public', 5) ; GO + ``` and **save the file**. 
diff --git a/docs/3-Data-API-builder.md b/docs/3-Data-API-builder.md index 3610b54..1bc9bd5 100644 --- a/docs/3-Data-API-builder.md +++ b/docs/3-Data-API-builder.md @@ -36,27 +36,43 @@ Also, Data API builder is Open Source and works on any platform; on-premises, in cd /workspaces/azure-sql-db-developers-workshop ``` -1. Next step is to create the Data API Builder initialization file. Be sure to replace **PASSWORD** with the password of your database. If you need to find the password again, you can run the +1. Next step is to create the Data API Builder initialization file. You need to get the connection string to connect to the `devDB` database that you created before. Since Data API builder is a .NET application, you can get the correct connection string using the following command: ```bash - sqlcmd config connection-strings + sqlcmd config connection-strings --database devDB | grep ADO.NET ``` - command again. Once you have your database password, replace **PASSWORD** in the following command and then run it at the terminal prompt: + Now, since the connection string is using a login/password pair, we're going to use environment variables to avoid storing the connection string in the Data API Builder configuration file. 
+ + Create an environment file: ```bash - dab init --database-type "mssql" --connection-string "Server=localhost;Database=devDB;User ID=vscode;Password="'PASSWORD'";TrustServerCertificate=true" --host-mode "Development" --rest.path "rest" --set-session-context true + touch .env + ``` + + and then add the connection string to the environment file from Visual Studio Code, setting the `MSSQL` variable to the connection string you obtained in the previous step: + + ```text + MSSQL='Server=tcp:127.0.0.1,1433;Initial Catalog=devDB;Persist Security Info=False;User ID=vscode;Password=...;MultipleActiveResultSets=False;Encrypt=True;TrustServerCertificate=True;Connection Timeout=30;' + ``` + + ![The connection string saved in the environment file](./media/ch3/dab0.png) + + Then, you can use the Data API Builder (DAB) CLI to initialize the configuration file: + + ```bash + dab init --database-type "mssql" --connection-string "@env('MSSQL')" --host-mode "Development" --rest.path "rest" ``` ![A picture of creating the data API builder initialization file using the codespace terminal ](./media/ch3/dab1.png) 1. Once the command completes, there will be a new file created named **dab-config.json**. - ![A picture of the new file named dab-config.json that was created using the dab init command ](./media/ch3/dab2.png) + ![A picture of the new file named dab-config.json that was created using the dab init command](./media/ch3/dab2.png) If you open this file by clicking on it, you will see the connect string for the database but no entities that are REST enabled. 
- ![A picture of the new file named dab-config.json opened in the codespace editor ](./media/ch3/dab3.png) + ![A picture of the new file named dab-config.json opened in the codespace editor](./media/ch3/dab3.png) ### Adding entries for testing @@ -64,18 +80,17 @@ Also, Data API builder is Open Source and works on any platform; on-premises, in For the **person** table: ```bash - dab add person --source dbo.person --permissions "anonymous:*" + dab add Person --source dbo.person --permissions "anonymous:*" --rest "person" ``` --- ![A picture of adding the person table to the dab-config.json file](./media/ch3/dab4.png) For the **address** table: ```bash - dab add address --source dbo.address --permissions "anonymous:*" + dab add Address --source dbo.address --permissions "anonymous:*" --rest "address" ``` --- ![A picture of adding the address table to the dab-config.json file](./media/ch3/dab5.png) - As seen in the above 2 screenshots, the tables are added to the entities section in the dab-config.json file. ### Adding entries for the Todo Application @@ -84,7 +99,7 @@ Also, Data API builder is Open Source and works on any platform; on-premises, in For the **person** table: ```bash - dab add todo --source dbo.todo --permissions "anonymous:*" + dab add Todo --source dbo.todo --permissions "anonymous:*" --rest "todo" ``` 1. Now that we have 2 tables added to the file, Data API builder can be started and the endpoints tested. 
Use the following command to start Data API builder locally in the codespace: @@ -188,16 +203,13 @@ http://localhost:5000/rest/person?$select=person_email { "value": [ { - "person_email": "bill@contoso.com", - "person_id": 1 + "person_email": "bill@contoso.com" }, { - "person_email": "frank@contoso.com", - "person_id": 2 + "person_email": "frank@contoso.com" }, { - "person_email": "Riley@contoso.com", - "person_id": 3 + "person_email": "Riley@contoso.com" } ] } @@ -349,7 +361,7 @@ content-type: application/json { "value": [ { - "person_id": 2, + "person_id": 3, "person_name": "Riley", "person_email": "riley@contososales.com", "pet_preference": "Cats" @@ -382,6 +394,34 @@ x-ms-correlation-id: 383d79b4-1646-4828-b66d-60fb0afcc14b ### GraphQL Endpoints +To test the GraphQL endpoints you can either use the `testing.rest` or you can use the interactive playground (enabled as Data API Builder has been configured to run in `development` mode) by opening the website associated with your codespace environment: + +![Visual Studio code showing the list of forwarded ports](../docs/media/ch3/dab-port.png) + +Click on the world icon and then once you see the "Healthy" text in your browser, add `/graphql` to the url, for example: + +```http +https://superior-barnacle-s3xwx94xyqhpzv-5000.app.github.dev/graphql/ +``` + +so that you'll see the GraphQL playground: + +![The welcome page of Banana Cake Pop GraphQL interactive playground](../docs/media/ch3/dab-bcp.png) + +If you are using Banana Cake Pop, in the next samples you only need to copy the text between the curly graph, for example: + +```graphql +{ + people(first: 5) { + items { + person_id + person_name + person_email + } + } +} +``` + #### Get the first 5 records **Request:** @@ -442,7 +482,7 @@ Content-Type: application/json X-Request-Type: GraphQL { - people(orderBy: {person_id: DESC} ) + people(orderBy: { person_id: DESC } ) { items { person_id @@ -494,11 +534,11 @@ X-Request-Type: GraphQL { person_by_pk(person_id: 1) { 
- person_id - person_name - person_email - pet_preference - } + person_id + person_name + person_email + pet_preference + } } ``` @@ -527,7 +567,7 @@ To create a GraphQL relationship, first stop DAB via the terminal in codespaces. Next, issue the following command in the same terminal window. ```bash -dab update person --relationship "address" --cardinality "many" --target.entity "address" +dab update person --relationship "addresses" --cardinality "many" --target.entity "Address" ``` ![A picture of running the dab update command to create a relationship between the person and address entities](./media/ch3/dab15.png) @@ -536,7 +576,7 @@ After running the command, the dab-config.json file will contain a new relations ```JSON "entities": { - "person": { + "Person": { "source": "dbo.person", "permissions": [ { @@ -547,9 +587,9 @@ After running the command, the dab-config.json file will contain a new relations } ], "relationships": { - "address": { + "Addresses": { "cardinality": "many", - "target.entity": "address" + "target.entity": "Address" } } }, @@ -580,7 +620,7 @@ X-Request-Type: GraphQL person_name person_email pet_preference - address { + addresses { items { address } @@ -602,7 +642,7 @@ X-Request-Type: GraphQL "person_name": "Bill", "person_email": "bill@contoso.com", "pet_preference": "Dogs", - "address": { + "addresses": { "items": [ { "address": "Lincoln, MA" @@ -615,7 +655,7 @@ X-Request-Type: GraphQL "person_name": "Frank", "person_email": "frank@contoso.com", "pet_preference": "Cats", - "address": { + "addresses": { "items": [ { "address": "Baltimore, MD" @@ -628,7 +668,7 @@ X-Request-Type: GraphQL "person_name": "Joel", "person_email": "joel@contoso.com", "pet_preference": "Dogs", - "address": { + "addresses": { "items": [] } } @@ -703,7 +743,7 @@ X-Request-Type: GraphQL person_id person_name pet_preference - address { + addresses { items { address } @@ -724,7 +764,7 @@ X-Request-Type: GraphQL "person_id": 1, "person_name": "Bill", "pet_preference": 
"Dogs", - "address": { + "addresses": { "items": [ { "address": "Lincoln, MA" @@ -736,7 +776,7 @@ X-Request-Type: GraphQL "person_id": 4, "person_name": "Joel", "pet_preference": "Dogs", - "address": { + "addresses": { "items": [] } } @@ -758,7 +798,7 @@ Content-Type: application/json X-Request-Type: GraphQL mutation { - createperson(item: { + createPerson(item: { person_name: "Elle", person_email: "elle@contoso.com" pet_preference: "Cats" @@ -776,7 +816,7 @@ mutation { ```JSON { "data": { - "createperson": { + "createPerson": { "person_id": 5, "person_name": "Elle", "person_email": "elle@contoso.com", @@ -798,7 +838,7 @@ Content-Type: application/json X-Request-Type: GraphQL mutation { - updateperson(person_id: 4, item: { + updatePerson(person_id: 4, item: { person_email: "joel22@contoso.com" }) { person_id @@ -813,7 +853,7 @@ mutation { ```JSON { "data": { - "updateperson": { + "updatePerson": { "person_id": 4, "person_name": "Joel", "person_email": "joel22@contoso.com" @@ -834,7 +874,7 @@ Content-Type: application/json X-Request-Type: GraphQL mutation { - deleteperson(person_id: 5) + deletePerson(person_id: 5) { person_id } @@ -846,7 +886,7 @@ mutation { ```JSON { "data": { - "deleteperson": { + "deletePerson": { "person_id": 5 } } @@ -866,7 +906,7 @@ Data API builder can also REST/GraphQL enable stored procedures in the database. Next, issue the following command in the same terminal window. 
```bash -dab add getPersonByPet --source dbo.get_person_by_pet --source.type "stored-procedure" --source.params "pet:" --permissions "anonymous:execute" --rest.methods "get" --graphql.operation "query" +dab add GetPersonByPet --source dbo.get_person_by_pet --source.type "stored-procedure" --source.params "pet:" --permissions "anonymous:execute" --rest.methods "get" --graphql.operation "query" ``` ![A picture of running the dab add command to enable a stored procedure](./media/ch3/dab16.png) @@ -874,7 +914,7 @@ dab add getPersonByPet --source dbo.get_person_by_pet --source.type "stored-proc After running the command, the dab-config.json file will contain the new entity: ```JSON -"getPersonByPet": { +"GetPersonByPet": { "source": { "type": "stored-procedure", "object": "dbo.get_person_by_pet", @@ -906,15 +946,15 @@ After running the command, the dab-config.json file will contain the new entity: Issue the following commands in the same terminal window. ```bash -dab add insert_todo --source dbo.insert_todo --source.type "stored-procedure" --source.params "title:,owner_id:,order:" --permissions "anonymous:execute" --rest.methods "post" --graphql false +dab add InsertTodo --source dbo.insert_todo --source.type "stored-procedure" --source.params "title:,owner_id:,order:" --permissions "anonymous:execute" --rest "insert_todo" --rest.methods "post" --graphql false ``` ```bash -dab add update_todo --source dbo.update_todo --source.type "stored-procedure" --source.params "id:,title:,owner_id:,completed:false,order:" --permissions "anonymous:execute" --rest.methods "post" --graphql false +dab add UpdateTodo --source dbo.update_todo --source.type "stored-procedure" --source.params "id:,title:,owner_id:,completed:false,order:" --permissions "anonymous:execute" --rest "update_todo" --rest.methods "post" --graphql false ``` ```bash -dab add delete_todo --source dbo.delete_todo --source.type "stored-procedure" --source.params "id:,owner_id:" --permissions "anonymous:execute" 
--rest.methods "delete" --graphql false +dab add DeleteTodo --source dbo.delete_todo --source.type "stored-procedure" --source.params "id:,owner_id:" --permissions "anonymous:execute" --rest "delete_todo" --rest.methods "delete" --graphql false ``` ### Testing the stored procedures @@ -934,7 +974,7 @@ Use the procedure’s REST endpoint and pass "Dogs" into the pet parameter. **Request:** ```bash -http://localhost:5000/rest/getPersonByPet?pet=Dogs +http://localhost:5000/rest/GetPersonByPet?pet=Dogs ``` **Response:** @@ -971,7 +1011,7 @@ Content-Type: application/json X-Request-Type: GraphQL query { - executegetPersonByPet(pet:"Cats") + executeGetPersonByPet(pet:"Cats") { person_id person_name @@ -986,7 +1026,7 @@ query { ```JSON { "data": { - "executegetPersonByPet": [ + "executeGetPersonByPet": [ { "person_id": 2, "person_name": "Frank", diff --git a/docs/4-swa-cli.md b/docs/4-swa-cli.md index 81cc39c..1a91795 100644 --- a/docs/4-swa-cli.md +++ b/docs/4-swa-cli.md @@ -57,7 +57,7 @@ Static Web Apps has built in [integration with the Azure SQL Database/Local SQL update the dab config file ```bash - dab update todo --map "position:order" + dab update Todo --map "position:order" ``` and looking at the dab-config.json file, we can see the added mapping to our configuration. @@ -173,7 +173,7 @@ Static Web Apps has built in [integration with the Azure SQL Database/Local SQL ```javascript function getPref(ppref) { - fetch('/data-api/api/getPersonByPet?pet=' + ppref) + fetch('/data-api/api/GetPersonByPet?pet=' + ppref) .then(res => { if (res.status== '403') { alert('403 - forbidden, try logging in') @@ -205,7 +205,7 @@ Static Web Apps has built in [integration with the Azure SQL Database/Local SQL ```JSON - "getPersonByPet": { + "GetPersonByPet": { "source": { "type": "stored-procedure", "object": "dbo.get_person_by_pet", @@ -304,7 +304,7 @@ Static Web Apps has built in [integration with the Azure SQL Database/Local SQL ### Working with the ToDo Application -1. 
Now that you see how SWA CLI works, it's time to try the more complex Todo application. We are going to use 2 versions of this. The first one will use only the /rest/todo REST endpoint with the second one adding the stored procedure table APIs for inserts, updates, and deletes. This using of these table API/Stored procedures will become more clear once we deploy onto Azure SQL Database in the Azure Cloud. +1. Now that you see how SWA CLI works, it's time to try the more complex Todo application. We are going to use 2 versions of this. The first one will use only the `/rest/todo` REST endpoint with the second one adding the stored procedure table APIs for inserts, updates, and deletes. This using of these table API/Stored procedures will become more clear once we deploy onto Azure SQL Database in the Azure Cloud. 1. With SWA CLI stopped, the **swa-cli.config.json** file needs to be swapped with the one in the labFiles folder. Run the following commands at the terminal to do just that. @@ -335,7 +335,7 @@ Static Web Apps has built in [integration with the Azure SQL Database/Local SQL 1. And start swa cli, again at the terminal ```bash - swa start --data-api-location ./swa-db-connections + swa start ``` 1. As with the sample JavaScript application, once Static Web Apps has started, you will get a message in the codespace on the bottom right indicating that it's running on port 4280. @@ -433,7 +433,7 @@ Static Web Apps has built in [integration with the Azure SQL Database/Local SQL 1. Start swa cli, again at the terminal ```bash - swa start --data-api-location ./swa-db-connections + swa start ``` 1. Open the Todo application in the browser if not already open. If opened, refresh the page. @@ -487,7 +487,7 @@ Static Web Apps has built in [integration with the Azure SQL Database/Local SQL 1. Start swa cli, again at the terminal. This will now use the new ToDoList.vue file. ```bash - swa start --data-api-location ./swa-db-connections + swa start ``` 1. 
Once swa cli has started, you can go back to the application in the browser and refresh the page. All the features of the application should function normally but are now using REST enabled stored procedures. diff --git a/docs/5-deploy-to-azure.md b/docs/5-deploy-to-azure.md index 846f510..9df976d 100644 --- a/docs/5-deploy-to-azure.md +++ b/docs/5-deploy-to-azure.md @@ -199,10 +199,12 @@ The next section of the workshop will be using an Azure SQL Database. To move ou 1. Run the following code in the query sheet: ```SQL - select * from person; + select * from dbo.person; + select p.person_name, a.address - from person p, address a - where p.person_id = a.person_id; + from dbo.person p inner join dbo.[address] a + on p.person_id = a.person_id; + select * from dbo.todo; go ``` @@ -213,7 +215,7 @@ The next section of the workshop will be using an Azure SQL Database. To move ou exec get_person_by_pet 'Dogs'; ``` -## Would you like to know more? 🦟 +## Would you like to know more? ### Azure SQL migration extension for Azure Data Studio diff --git a/docs/6-invoke-REST.md b/docs/6-invoke-REST.md index 9e8fcf5..9af6853 100644 --- a/docs/6-invoke-REST.md +++ b/docs/6-invoke-REST.md @@ -236,7 +236,7 @@ Additional Best Practices from the documentation: ### Sending the prompt text with External REST Endpoint Invocation > [!NOTE] -> The server name in the URL parameter on the next example is aidemo and the headers parameter value for api-key is 1234567890. +> The server name in the URL parameter on the next example is `aidemo` and the headers parameter value for api-key is `1234567890`. > Please change this name and key to align with the values in your account. > @@ -273,6 +273,8 @@ Additional Best Practices from the documentation: @response = @response output; select @ret as ReturnCode, @response as Response; + + select json_value(@response, '$.result.choices[0].message.content') as [Message]; ``` 1. 
Once the code is in the codespace query editor sheet, run it with the green run arrow in the upper right. @@ -388,31 +390,29 @@ Additional Best Practices from the documentation: ### The Todo application, SWA, and External REST Endpoint Invocation > [!NOTE] -> The server name in the URL parameter on the next example is aidemo and the headers parameter value for api-key is 1234567890. +> The server name in the URL parameter on the next example is `aidemo` and the headers parameter value for api-key is `1234567890`. > Please change this name and key to align with the values in your account. > In this next section, we will be using the Todo application against our Free Azure SQL Database. Then, we will be adding to the insert_todo stored procedure to call OpenAI via External REST endpoint invocation. We will be asking OpenAI to translate the Todo task's title into german and then insert that value into the table. -1. To start, we need to change the database connection in the **staticwebapp.database.config.json** file to use our Free Azure SQL Database. Select the file in codespace and on the top, find the **connection-string**. - - ![A picture of the new file named staticwebapp.database.config.json opened in the code space editor and looking at the connection-string](./media/ch6/rest8.png) - -1. Change the connection-string values to reflect the server name, database name of freeDB, User ID of sqladmin, and the password you used when you created the database. It should look similar to the following: +1. Back in the **SQL Server Connections extension**, right click the database profile name,**Free Azure Database**, and select **New Query**. This will bring up a new query sheet. 
- ```JSON - "connection-string": "Server=freedbsqlserver.database.windows.net;Database=freeDB;User ID=sqladmin;Password=PASSWORD;TrustServerCertificate=true", - ``` + ![A picture of right clicking the Free Azure Database profile name and selecting New Query](./media/ch6/rest9.png) - and **save the file**. +1. To avoid using an administrative account to allow the application to access the database, we will create a new user and grant it the necessary permissions. Copy and paste the following code into the query sheet, replacing `PASSWORD` with a password of your choice, then run it in the Query editor. - ![A picture of the new file named staticwebapp.database.config.json opened in the code space editor and looking at the connection-string pointing to the Free Azure SQL Database](./media/ch6/rest8a.png) + ```SQL + CREATE USER [swaappuser] WITH PASSWORD = 'PASSWORD'; -1. Back in the **SQL Server Connections extension**, right click the database profile name,**Free Azure Database**, and select **New Query**. This will bring up a new query sheet. + ALTER ROLE db_datareader ADD MEMBER [swaappuser]; + ALTER ROLE db_datawriter ADD MEMBER [swaappuser]; + GRANT EXECUTE ON SCHEMA::dbo TO [swaappuser]; - ![A picture of right clicking the Free Azure Database profile name and selecting New Query](./media/ch6/rest9.png) + GRANT EXECUTE ANY EXTERNAL ENDPOINT TO [swaappuser]; + ``` -1. Copy and paste the following code, then run it in the Query editor. +1. Then, copy and paste the following code, then run it in the Query editor. ```SQL ALTER PROCEDURE dbo.insert_todo @@ -450,10 +450,25 @@ In this next section, we will be using the Todo application against our Free Azu GO ``` +1. Now we need to change the database connection in the **.env** file to use our Free Azure SQL Database. Select the file in codespace and on the top, find the **MSSQL** variable that contains the connection string. 
+ + ![A picture of the file named .env opened in the code space editor and looking at the connection string](./media/ch6/rest8.png) + +1. Change the connection string value to reflect the server name, database name of `freeDB`, User ID of `swaappuser` (or the user name you chose in the previous step, if different), and the password you used when you created the database. It should look similar to the following: + + ```bash + MSSQL='Server=freedbsqlserver.database.windows.net;Initial Catalog=freeDB;User ID=swaappuser;Password=PASSWORD;', + ``` + + and **save the file**. + + ![A picture of the new file named .env opened in the code space editor and looking at the connection string pointing to the Free Azure SQL Database](./media/ch6/rest8a.png) + + 1. Next, start swa cli again at the terminal ```bash - swa start --data-api-location ./swa-db-connections + swa start ``` 1. Open the Todo application in a browser if not already opened, or refresh the current browser page where it was running. diff --git a/docs/7-sql-bindings.md b/docs/7-sql-bindings.md index a1baae1..b7dcfff 100644 --- a/docs/7-sql-bindings.md +++ b/docs/7-sql-bindings.md @@ -32,6 +32,24 @@ In this section, you will create a change data stream using Change Tracking, the +1. The user used by the application to connect to the database must also be granted the permission to read the change tracking information. Run the following command to grant the permission to the user: + + ```SQL + GRANT VIEW CHANGE TRACKING ON SCHEMA::dbo TO [swaappuser] + GO + ``` + + ![A picture of granting the permission to read the change tracking information to the user](./media/ch7/bind1d.png) + +1. To keep track of which changes have already been notified, the Azure Function needs to create some tables, and therefore needs to have permission to run Data Definition Language statements. 
Run the following command to grant the permission to the user: + + ```SQL + ALTER ROLE db_ddladmin ADD MEMBER [swaappuser] + GO + ``` + + ![A picture of granting the permission to run Data Definition Language statements to the user](./media/ch7/bind1e.png) + ### Create an Azure Function 1. Back in the terminal at the bottom of the page, @@ -67,7 +85,7 @@ In this section, you will create a change data stream using Change Tracking, the then ```bash - dotnet add package Microsoft.Azure.WebJobs.Extensions.Sql --prerelease + dotnet add package Microsoft.Azure.WebJobs.Extensions.Sql ``` ### Create the SQL trigger function @@ -110,13 +128,13 @@ In this section, you will create a change data stream using Change Tracking, the ![A picture of use the value of [dbo].[person] for the database table name then pressing enter](./media/ch7/bind17.png) -1. The SqlTriggerBindingCSharp1.cs file has been created and is in the editor for review. +1. The `SqlTriggerBindingCSharp1.cs` file has been created and is in the editor for review. ![A picture of the SqlTriggerBindingCSharp1.cs file](./media/ch7/bind18.png) -1. There are a few quick changes we need to make in this file. The boilerplate code that has been created has a ToDoItem class. We need to change this to the **person class object**. +1. There are a few quick changes we need to make in this file. The boilerplate code that has been created has a ToDoItem class. We need to change this to the **`Person` class object**. 
- Replace the ToDoItem class + Replace the `ToDoItem` class ```c# public class ToDoItem @@ -127,14 +145,14 @@ In this section, you will create a change data stream using Change Tracking, the } ``` - with the **person** class + with the **`Person`** class ```c# - public class person + public class Person { public int person_id { get; set; } public string person_name { get; set; } - public string person_email { get; set; } + public string person_email { get; set; } public string pet_preference { get; set; } } ``` @@ -149,11 +167,11 @@ In this section, you will create a change data stream using Change Tracking, the again, the boilerplate has the ToDoItem class referenced. - Just change the ToDoItem with **person** + Just change the `ToDoItem` with **`Person`** ```c# public static void Run( - [SqlTrigger("[dbo].[person]", "connection-string")] IReadOnlyList> changes, + [SqlTrigger("[dbo].[person]", "connection-string")] IReadOnlyList> changes, ILogger log) ``` @@ -163,32 +181,28 @@ In this section, you will create a change data stream using Change Tracking, the 1. If you didn't already, **save the file**. -# MAYBE TODO: CHANGE using Microsoft.Azure.WebJobs.Extensions.Http; to using Microsoft.Azure.WebJobs.Extensions.Sql; - ### Testing the trigger -1. Now that the function code is done, we need to provide it a value for connection-string. If you remember, back in the staticwebapp.database.config.json in the swa-db-connections directory, +1. Now that the function code is done, we need to provide it a value for the `connection-string` Azure Function setting. If you remember, back in the `.env` in the root directory, - ![A picture of opening the staticwebapp.database.config.json in the swa-db-connections directory](./media/ch7/bind19a.png) + ![A picture of opening the .env in the root directory](./media/ch7/bind19a.png) - we had a parameter named connection-string with our Free Azure SQL Database connection. 
Open the staticwebapp.database.config.json file and copy the connection-string entry. + we stored the connection string for our Free Azure SQL Database connection. Open the `.env` file and copy the connection string entry - ```JSON - "connection-string": "Server=freedbsqlserver.database.windows.net;Database=freeDB;User ID=sqladmin;Password=PASSWORD;TrustServerCertificate=true", + ```text + Server=freedbsqlserver.database.windows.net;Initial Catalog=freeDB;User ID=swaappuser;Password=PASSWORD ``` - ![A picture of opening the staticwebapp.database.config.json and copying the connection-string entry](./media/ch7/bind19.png) - -1. The connection-string parameter can now be placed in the local.settings.json file in the functions project directory. Open the local.settings.json file in the triggerBinding directory. + in the clipboard. - Open the local.settings.json file: +1. The connection string can now be placed in the `local.settings.json` file in the functions project directory. Open the `local.settings.json` file in the `triggerBinding` directory. ![A picture of opening the local.settings.json file in the triggerBinding directory](./media/ch7/bind20.png) -1. Paste the connection-string entry copied from the staticwebapp.database.config.json file just below the **“Values”: {** section in the local.settings.json file +1. 
Paste the connection string entry copied from the `.env` into the `connection-string` property that you have to manually add to the opened JSON file, just below the **“Values”: {** section in the `local.settings.json` file ```JSON - "connection-string": "Server=freedbsqlserver.database.windows.net;Database=freeDB;User ID=sqladmin;Password=PASSWORD;TrustServerCertificate=true", + "connection-string": "Server=freedbsqlserver.database.windows.net;Initial Catalog=freeDB;User ID=swaappuser;Password=PASSWORD;", ``` ![A picture of pasting the connection-string entry copied from the staticwebapp.database.config.json file just below the “Values”: { section in the local.settings.json file ](./media/ch7/bind21.png) @@ -200,7 +214,7 @@ In this section, you will create a change data stream using Change Tracking, the 1. Back in the terminal, run the following command to start the Azure Function: ```bash - func host start + func start ``` and once the function is started, right click on the Free Azure Database connection profile and select New Query. diff --git a/docs/8-github-actions.md b/docs/8-github-actions.md index 2ea0a1a..51939c2 100644 --- a/docs/8-github-actions.md +++ b/docs/8-github-actions.md @@ -6,11 +6,14 @@ This chapter will be using the [sql-action GitHub Action](https://github.com/Azure/sql-action) for deploying changes to Azure SQL or SQL Server in a dacpac, SQL scripts, or an SDK-style SQL project. This action will automate a workflow to deploy updates to Azure SQL or SQL Server on a repository push. 
-https://docs.github.com/en/actions/learn-github-actions/understanding-github-actions +More details can be found here: -https://github.com/actions/checkout -https://github.com/Azure/sql-action +- [Understanding GitHub Actions](https://docs.github.com/en/actions/learn-github-actions/understanding-github-actions) +The two GitHub Actions that will be used in this chapter are: + +- [Checkout GitHub Action](https://github.com/actions/checkout) +- [Azure SQL Deploy](https://github.com/Azure/sql-action) ## GitHub Actions workshop tasks diff --git a/docs/T-known-issues.md b/docs/T-known-issues.md index fd48123..9d17f1d 100644 --- a/docs/T-known-issues.md +++ b/docs/T-known-issues.md @@ -18,6 +18,12 @@ If your codespace shuts down, you can restart it by refreshing the page. After t ![A picture of restarting the database using the docker extension in codespace](./media/cht/trouble1.png) +another option to do the same, if you prefer a command line approach, is to run, from the terminal, the following command: + +```bash +sqlcmd start +``` + ## Testing connection profile message on the bottom right of the codespace and an unexpected error message on reload If you see the testing connection profile message on the bottom right of the codespace constantly running diff --git a/docs/media/ch3/dab-bcp.png b/docs/media/ch3/dab-bcp.png new file mode 100644 index 0000000..5a5e291 Binary files /dev/null and b/docs/media/ch3/dab-bcp.png differ diff --git a/docs/media/ch3/dab-port.png b/docs/media/ch3/dab-port.png new file mode 100644 index 0000000..54956c2 Binary files /dev/null and b/docs/media/ch3/dab-port.png differ diff --git a/docs/media/ch3/dab0.png b/docs/media/ch3/dab0.png new file mode 100644 index 0000000..101138b Binary files /dev/null and b/docs/media/ch3/dab0.png differ diff --git a/docs/media/ch3/dab1.png b/docs/media/ch3/dab1.png index 5e8eb26..cbb315d 100644 Binary files 
a/docs/media/ch3/dab1.png and b/docs/media/ch3/dab1.png differ diff --git a/docs/media/ch3/dab15.png b/docs/media/ch3/dab15.png index f6f1262..b000f15 100644 Binary files a/docs/media/ch3/dab15.png and b/docs/media/ch3/dab15.png differ diff --git a/docs/media/ch3/dab4.png b/docs/media/ch3/dab4.png index aa8db4d..f35ced6 100644 Binary files a/docs/media/ch3/dab4.png and b/docs/media/ch3/dab4.png differ diff --git a/docs/media/ch3/dab5.png b/docs/media/ch3/dab5.png index 1ddf391..505ff6f 100644 Binary files a/docs/media/ch3/dab5.png and b/docs/media/ch3/dab5.png differ diff --git a/docs/media/ch6/rest8.png b/docs/media/ch6/rest8.png index d4e7a3a..d34b989 100644 Binary files a/docs/media/ch6/rest8.png and b/docs/media/ch6/rest8.png differ diff --git a/docs/media/ch6/rest8a.png b/docs/media/ch6/rest8a.png index 0329579..c9c10c5 100644 Binary files a/docs/media/ch6/rest8a.png and b/docs/media/ch6/rest8a.png differ diff --git a/docs/media/ch7/bind18b.png b/docs/media/ch7/bind18b.png index d3061f8..2af72da 100644 Binary files a/docs/media/ch7/bind18b.png and b/docs/media/ch7/bind18b.png differ diff --git a/docs/media/ch7/bind19a.png b/docs/media/ch7/bind19a.png index 7991c1e..0c60082 100644 Binary files a/docs/media/ch7/bind19a.png and b/docs/media/ch7/bind19a.png differ diff --git a/docs/media/ch7/bind1d.png b/docs/media/ch7/bind1d.png new file mode 100644 index 0000000..88046b8 Binary files /dev/null and b/docs/media/ch7/bind1d.png differ diff --git a/docs/media/ch7/bind1e.png b/docs/media/ch7/bind1e.png new file mode 100644 index 0000000..b3fdc5b Binary files /dev/null and b/docs/media/ch7/bind1e.png differ diff --git a/docs/media/ch7/bind21.png b/docs/media/ch7/bind21.png index 7f6f70c..c2c184b 100644 Binary files a/docs/media/ch7/bind21.png and b/docs/media/ch7/bind21.png differ diff --git a/labFiles/ToDoList.vue.SP b/labFiles/ToDoList.vue.SP index 43f2859..eee6bd0 100644 --- a/labFiles/ToDoList.vue.SP +++ b/labFiles/ToDoList.vue.SP @@ -141,7 +141,7 @@ export 
default { evt.preventDefault(); }, - dragDrop: function(evt, destTodo) { + dragDrop: async function(evt, destTodo) { evt.target.classList.remove("drag"); const sourceId = evt.dataTransfer.getData('itemID') const sourceTodo = this.todos.find((t => t.id == sourceId)) @@ -154,13 +154,13 @@ export default { const userId = this.userId ?? "public"; - fetch(API_UPDATE, { + await fetch(API_UPDATE, { headers: HEADERS, method: "POST", body: JSON.stringify({ order: destTodo.order, owner_id: userId, id: destTodo.id, completed: destTodo.completed, title: destTodo.title }) }); - fetch(API_UPDATE, { + await fetch(API_UPDATE, { headers: HEADERS, method: "POST", body: JSON.stringify({ order: sourceTodo.order, owner_id: userId, id: sourceId, completed: sourceTodo.completed, title: sourceTodo.title }) diff --git a/labFiles/swa-cli.config.json.todo b/labFiles/swa-cli.config.json.todo index a4021cc..2d815e1 100644 --- a/labFiles/swa-cli.config.json.todo +++ b/labFiles/swa-cli.config.json.todo @@ -3,6 +3,7 @@ "configurations": { "dab-swa-todo": { "appLocation": "client", + "dataApiLocation": "swa-db-connections", "outputLocation": "dist", "appBuildCommand": "npm run build", "run": "npm run dev", diff --git a/scripts/install-dev-tools.sh b/scripts/install-dev-tools.sh index f30aba3..3a30e2b 100644 --- a/scripts/install-dev-tools.sh +++ b/scripts/install-dev-tools.sh @@ -1,32 +1,42 @@ sudo cp ./scripts/ms-repo.pref /etc/apt/preferences.d/ -sudo wget -q https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb + +export dotnet_version="6.0" +export dab_version="0.9.7" +export sqlcmd_version="1.6.0" +export func_version="4" + +export debian_version=$(if command -v lsb_release &> /dev/null; then lsb_release -r -s; else grep -oP '(?<=^VERSION_ID=).+' /etc/os-release | tr -d '"'; fi) + +wget https://packages.microsoft.com/config/debian/$debian_version/packages-microsoft-prod.deb -O packages-microsoft-prod.deb sudo dpkg -i packages-microsoft-prod.deb -sudo apt-get update 
-sudo apt-get install azure-functions-core-tools-4 -sudo rm -rf packages-microsoft-prod.deb -sudo apt install dotnet-sdk-6.0 -y +rm packages-microsoft-prod.deb +sudo apt update + +sudo apt install dotnet-sdk-$dotnet_version -y + +npm i -g azure-functions-core-tools@$func_version --unsafe-perm true + npm install -g @azure/static-web-apps-cli + dotnet tool install -g microsoft.sqlpackage -dotnet new -i Microsoft.Build.Sql.Templates -dotnet tool install --global Microsoft.DataApiBuilder --version 0.9.6-rc -sudo apt-get update +dotnet new install Microsoft.Build.Sql.Templates + +dotnet tool install -g Microsoft.DataApiBuilder --version $dab_version + sudo apt-get install sqlcmd -sudo wget https://github.com/microsoft/go-sqlcmd/releases/download/v1.4.0/sqlcmd-v1.4.0-linux-amd64.tar.bz2 -sudo bunzip2 sqlcmd-v1.4.0-linux-amd64.tar.bz2 -sudo tar xvf sqlcmd-v1.4.0-linux-amd64.tar +sudo wget https://github.com/microsoft/go-sqlcmd/releases/download/v$sqlcmd_version/sqlcmd-v$sqlcmd_version-linux-amd64.tar.bz2 +sudo bunzip2 sqlcmd-v$sqlcmd_version-linux-amd64.tar.bz2 +sudo tar xvf sqlcmd-v$sqlcmd_version-linux-amd64.tar sudo mv sqlcmd /usr/bin/sqlcmd -sudo rm -rf sqlcmd-v1.4.0-linux-amd64.tar -sudo rm -rf sqlcmd_debug -sudo rm -rf NOTICE.md -echo 'PATH=$PATH:$HOME/.dotnet/tools' >> ~/.bashrc -echo "rm packages-microsoft-prod.deb" -rm packages-microsoft-prod.deb -echo "rm microsoft.gpg" -rm microsoft.gpg -ls -mkdir -p /home/vscode/.swa/dataApiBuilder/0.9.6-rc/ -cd /home/vscode/.swa/dataApiBuilder/0.9.6-rc/ -wget https://github.com/Azure/data-api-builder/releases/download/v0.9.6-rc/dab_linux-x64-0.9.6-rc.zip -unzip dab_linux-x64-0.9.6-rc.zip -chmod 777 * +sudo rm sqlcmd-v$sqlcmd_version-linux-amd64.tar +sudo rm sqlcmd_debug +sudo rm NOTICE.md + +if [[ ":$PATH:" == *":$HOME/.dotnet/tools:"* ]]; then + echo "Path already includes ~/.dotnet/tools, skipping." +else + echo "Adding ~/.dotnet/tools to path." 
+ echo 'PATH=$PATH:$HOME/.dotnet/tools' >> ~/.bashrc +fi + diff --git a/scripts/ms-repo.pref b/scripts/ms-repo.pref new file mode 100644 index 0000000..a788d5b --- /dev/null +++ b/scripts/ms-repo.pref @@ -0,0 +1,3 @@ +Package: dotnet* aspnet* netstandard* +Pin: origin "archive.ubuntu.com" +Pin-Priority: -10 \ No newline at end of file