diff --git a/HOLs/CreateIoTDeviceID/iotdevid_1.png b/HOLs/CreateIoTDeviceID/iotdevid_1.png
new file mode 100644
index 0000000..aee8494
Binary files /dev/null and b/HOLs/CreateIoTDeviceID/iotdevid_1.png differ
diff --git a/HOLs/CreateIoTDeviceID/iotdevid_2.png b/HOLs/CreateIoTDeviceID/iotdevid_2.png
new file mode 100644
index 0000000..6f41205
Binary files /dev/null and b/HOLs/CreateIoTDeviceID/iotdevid_2.png differ
diff --git a/HOLs/CreateIoTDeviceID/iotdevid_3.png b/HOLs/CreateIoTDeviceID/iotdevid_3.png
new file mode 100644
index 0000000..8888ab7
Binary files /dev/null and b/HOLs/CreateIoTDeviceID/iotdevid_3.png differ
diff --git a/HOLs/CreateIoTDeviceID/iotdevid_4.png b/HOLs/CreateIoTDeviceID/iotdevid_4.png
new file mode 100644
index 0000000..035bd50
Binary files /dev/null and b/HOLs/CreateIoTDeviceID/iotdevid_4.png differ
diff --git a/HOLs/CreateIoTDeviceID/iotdevid_5.png b/HOLs/CreateIoTDeviceID/iotdevid_5.png
new file mode 100644
index 0000000..88a0ee7
Binary files /dev/null and b/HOLs/CreateIoTDeviceID/iotdevid_5.png differ
diff --git a/HOLs/Node-RED Flows/Flow 3 - 03 - Alert and Buzzer Nodes Only.json b/HOLs/Node-RED Flows/Flow 3 - 03 - Alert and Buzzer Nodes Only.json
index 4c16b60..884a1ab 100644
--- a/HOLs/Node-RED Flows/Flow 3 - 03 - Alert and Buzzer Nodes Only.json
+++ b/HOLs/Node-RED Flows/Flow 3 - 03 - Alert and Buzzer Nodes Only.json
@@ -1,68 +1,51 @@
[
{
- "id": "620468b8.5bbd38",
- "type": "function",
- "z": "e6060ba3.1c8608",
- "name": "To String",
- "func": "msg.payload = String.fromCharCode.apply(null, new Uint16Array(msg.payload));\nreturn msg;",
- "outputs": 1,
- "noerr": 0,
- "x": 160,
- "y": 400,
- "wires": [
- [
- "46d0d84d.3489a8"
- ]
- ]
- },
- {
- "id": "46d0d84d.3489a8",
+ "id": "230a9a29.67aa16",
"type": "json",
- "z": "e6060ba3.1c8608",
+ "z": "46e83615.8cd758",
"name": "",
- "x": 310,
- "y": 400,
+ "x": 1226,
+ "y": 514,
"wires": [
[
- "7a526de7.f1dd14",
- "88b54d32.90f8",
- "8d65a1b4.e01e2"
+ "3aa0d47a.9838fc",
+ "769401a3.c1eea"
]
]
},
{
- "id": "7a526de7.f1dd14",
+ "id": "338411f3.72a1ce",
"type": "debug",
- "z": "e6060ba3.1c8608",
+ "z": "46e83615.8cd758",
"name": "",
- "active": true,
+ "active": false,
"console": "false",
"complete": "payload",
- "x": 490,
- "y": 400,
+ "x": 1244,
+ "y": 452,
"wires": []
},
{
- "id": "8d65a1b4.e01e2",
+ "id": "769401a3.c1eea",
"type": "function",
- "z": "e6060ba3.1c8608",
- "name": "Get the Message",
- "func": "msg.payload = msg.payload.message\nreturn msg;",
+ "z": "46e83615.8cd758",
+      "name": "Get the Message",
+      "func": "msg.payload = msg.payload.message\nreturn msg;",
"outputs": 1,
"noerr": 0,
- "x": 495,
- "y": 513,
+ "x": 1454,
+ "y": 611,
"wires": [
[
- "d268431f.fab68"
+ "e02e72ab.8a63f"
]
]
},
{
- "id": "88b54d32.90f8",
+ "id": "3aa0d47a.9838fc",
"type": "switch",
- "z": "e6060ba3.1c8608",
- "name": "If Temp Alert",
+ "z": "46e83615.8cd758",
+      "name": "If Temp Alert",
"property": "payload.type",
"propertyType": "msg",
"rules": [
@@ -74,55 +57,55 @@
],
"checkall": "false",
"outputs": 1,
- "x": 137,
- "y": 500,
+ "x": 1200,
+ "y": 608,
"wires": [
[
- "77804d3c.37d0c4"
+ "70f32337.f1ef1c"
]
]
},
{
- "id": "77804d3c.37d0c4",
+ "id": "70f32337.f1ef1c",
"type": "function",
- "z": "e6060ba3.1c8608",
- "name": "Turn on Buzzer",
- "func": "msg.payload = 1;\nreturn msg;",
+ "z": "46e83615.8cd758",
+      "name": "Turn on Buzzer",
+      "func": "msg.payload = 1;\nreturn msg;",
"outputs": 1,
"noerr": 0,
- "x": 147,
- "y": 560,
+ "x": 1210,
+ "y": 668,
"wires": [
[
- "f9aa061a.e1f8a8"
+ "f7966fd3.68f8b"
]
]
},
{
- "id": "f9aa061a.e1f8a8",
+ "id": "f7966fd3.68f8b",
"type": "UPM-Grove-Buzzer",
- "z": "e6060ba3.1c8608",
+ "z": "46e83615.8cd758",
"name": "",
"platform": "512",
"volume": "0.125",
"pin": "3",
- "x": 145,
- "y": 618,
+ "x": 1208,
+ "y": 726,
"wires": []
},
{
- "id": "d268431f.fab68",
+ "id": "e02e72ab.8a63f",
"type": "upm-grove-rgb-lcd",
- "z": "e6060ba3.1c8608",
- "name": "Show the Message",
+ "z": "46e83615.8cd758",
+      "name": "Show the Message",
"platform": "512",
"r": 255,
"g": 0,
"b": 0,
"row": "1",
"column": 0,
- "x": 497,
- "y": 620,
+ "x": 1462,
+ "y": 670,
"wires": []
}
-]
\ No newline at end of file
+]
diff --git a/HOLs/Stream Analytics/Stream Analytics Query.sql b/HOLs/Stream Analytics/Stream Analytics Query.sql
index 106dfc5..f6ebfec 100644
--- a/HOLs/Stream Analytics/Stream Analytics Query.sql
+++ b/HOLs/Stream Analytics/Stream Analytics Query.sql
@@ -9,17 +9,3 @@ INTO
sqldb
FROM
iothub
-
--- SELECT ONLY messages
--- WHERE the temperature is > 40
--- FROM the iot hub and put them
--- INTO the alerts event hub
-SELECT
- deviceID,
- [timestamp] as [timestamp],
- temperature
-INTO
- alerts
-FROM
- iothub
-WHERE temperature > 40
\ No newline at end of file
diff --git a/HOLs/images/CreateSQLDB_1.png b/HOLs/images/CreateSQLDB_1.png
new file mode 100644
index 0000000..69d3dff
Binary files /dev/null and b/HOLs/images/CreateSQLDB_1.png differ
diff --git a/HOLs/images/CreateSQLDB_2.1.png b/HOLs/images/CreateSQLDB_2.1.png
new file mode 100644
index 0000000..12c2318
Binary files /dev/null and b/HOLs/images/CreateSQLDB_2.1.png differ
diff --git a/HOLs/images/CreateSQLDB_2.png b/HOLs/images/CreateSQLDB_2.png
new file mode 100644
index 0000000..4477c5e
Binary files /dev/null and b/HOLs/images/CreateSQLDB_2.png differ
diff --git a/HOLs/images/CreateSQLDB_3.png b/HOLs/images/CreateSQLDB_3.png
new file mode 100644
index 0000000..c52fecc
Binary files /dev/null and b/HOLs/images/CreateSQLDB_3.png differ
diff --git a/HOLs/images/CreateSQLDB_4.png b/HOLs/images/CreateSQLDB_4.png
new file mode 100644
index 0000000..fbf245c
Binary files /dev/null and b/HOLs/images/CreateSQLDB_4.png differ
diff --git a/HOLs/readme.md b/HOLs/readme.md
index bade86c..bdc2bfd 100644
--- a/HOLs/readme.md
+++ b/HOLs/readme.md
@@ -35,7 +35,6 @@ Tasks
1. [Getting Started with Grove IoT Commercial Developer Kit](#GettingStartedWithGrove)
1. [Intel NUC Developer Hub Overview](#IntelNucHubOverview)
-1. [Connecting to your Gateway using SSH](#ConnectingWithSSH)
1. [Blinking an LED with Node-RED](#Blinky)
1. [Reading the Temperature Sensor](#ReadingTemperatures)
1. [Planning your Azure Resources](#PlanningAzure)
@@ -45,7 +44,6 @@ Tasks
1. [Processing Temperature Data with Stream Analytics](#ProcessingWithStreamAnalytics)
1. [Displaying Temperature Data with Azure Web Apps](#AzureWebApp)
1. [Sending Messages from the Azure IoT Hub to the Intel Gateway](#CloudToDeviceMessages)
-1. [TIME PERMITTING - Display Temperature Data with Power BI Embedded](#PowerBIEmbedded)
1. [Cleaning Up](#CleaningUp)
___
@@ -92,107 +90,7 @@ Getting Started with Grove IoT Commercial Developer Kit
![IP Address on LCD Panel](images/01090-IPAddressOnLCD.png)
-1. Once the IP Address has been displayed, wait another two minutes and then open your browser and go to the IP Address from the previous step (`http://your.nucs.ip.address` where `your.nucs.ip.address is` the IP Address from above). If you are presented with a **"Privacy Statement"** click **"Continue"**.
-
- > **Note:** Why are we waiting? The "IoT Gateway Developer Hub" is a web application that is delivered by an nginx web server instance that takes a little bit of time to spin up. Just because the IP Address is showing on the LCD, that doesn't mean that the web application is ready yet. **Give it a few of minutes, it may even take up to five minutes or so**, and you'll likely be less frustrated! ***Have some fun while you wait, try turning the knob on the Rotary Angle Sensor to see the background colors on the RGB LCD change. That happens because the default Node-RED flow (we'll see that soon) is reading that sensor value and changing the background color based on the value it reads. Enjoy.***
-
- ![Privacy Statement](images/01100-PrivacyStatement.png)
-
-1. Login using "**`root`**" as both the user name and password:
-
- ![Login as root](images/01110-Login.png)
-
-1. If presented with the "**License Agreement**", click "**Agree**" to continue:
-
- ![License Agreement](images/01120-Eula.png)
-
-1. Once you have successfully logged in, you should see the "**IoT Gateway Developer Hub**" (a.k.a. "**Dev Hub**").
-
- ![IoT Gateway Developer Hub](images/01130-IoTGatewayDeveloperHub.png)
-___
-
-
-Intel NUC Developer Hub Overview
----
-
-The Developer Hub is a front end interface for the Gateway. It has 5 main functions:
-
-- Display sensor data and basic Gateway information on a configurable dashboard
-- Access the Node-RED development environment
-- Add repositories, install and upgrade packages
-- Administer the Gateway – update OS, configure network setting
-- Access documentation
-
-1. The "**Sensors**" page can be used to monitor the sensor data that is being published to the dashboard. By default the gateway is configured to display the value of the rotary angle sensor (knob) attached to the Arduino 101. You can twist the knob to see the values change in the Device and Sensor Information panel on this page.
-
- ![Sensor Dashboard](images/02010-Sensors.png)
-
-1. You can collapse the Device and Sensor Information panel along the top of the page to make the other information below easier to see by clicking the arrow at the top of the navigation bar. Click the button again to expand the panel again.
-
- ![Collapsed Panel](images/02020-CollapsedDeviceAndSensorData.png)
-
-1. The "**Packages**" page allows you to manage the various RPM package repositories and installed packages on your gateway. We will use this page later to add the Microsoft Azure related capabilities to your gateway.
-
- > **Note**: **PLEASE DO NOT INSTALL UPDATES AT THIS TIME**. While normally you would want to update your devices, in the lab environment this will take too long and negatively impact the available network bandwidth. Please refrain from updating your IoT Gateway while at an IoT Camp event. You are welcome to perform the updates back on your own network.
-
- ![Packages](images/02030-Packages.png)
-
-1. The "**Administration**" page provides easy access to a number of tools. The tools most applicable to this lab are the "Quick Tools":
-
- > **Note**: **PLEASE DO NOT INSTALL OS UPDATES OR UPGRADE TO PRO AT THIS TIME**. While these options may be desirable they will take too long for the time frame of this lab as well as have a negative impact on the available bandwidth at the event. Please refrain from updating your IoT Gateway while at an IoT Camp event. You are welcome to perform the updates back on your own network.
-
- - Easy access to the "**Node-RED**" (link) development environment that has been pre-installed on the IoT Gateway. We will use this extensively in this lab.
- - A way to register your gateway with the "**Wind River Helix App Cloud**". We won't be using this feature as part of this lab.
- - Access to the "**LuCI**" ( link ) interface to configure administrative settings on your gateway. **PLEASE DO NOT MAKE CHANGES USING THIS TOOL.**
- - The "**Cloud Commander**" web console, file manager, and editor.
-
- ![Administration](images/02040-Administration.png)
-
-1. The "**Documentation**" page provides a wealth of documentation, community, and source code resources.
-
- ![Documentation](images/02050-Documentation.png)
-
-___
-
-
-Connecting to your Gateway using SSH
----
-
-In order to perform advanced configuration of the Gateway either a monitor and keyboard, or a Secure Shell (SSH) connection is required. On OSX and Linux there are default programs that can do this - Screen and SSH respectively. However on Windows no default ssh client exists, however PuTTY is light weight and easy to install and can be used to SSH into the Gateway.
-
-1. For Windows users:
-
- > **Note**: In the screen shots below, ***192.168.2.13*** is the IP Address of the IoT Gateway being connected to. Replace that IP Address with the IP Address of your IoT Gateway. That is the IP Address that should be displayed on the LCD Panel attached to your Arduino 101.
-
- - Visit the PuTTY download page.
- - Under the "**For Windows on Intel x86**" heading, click on the "**putty.exe**" link to download the latest release version to your computer. Or if you prefer to use an installer that includes all of the additional tools like PSCP and PSFTP click the "**putty-0.67-installer.exe**" link (or latest version).
-
- ![PuTTY Downloads](images/03010-PuttyDownload.png)
-
- - Double-click putty.exe on your computer to launch PuTTY, or run the installer to install it if you chose to download it. Then run PuTTY.
- - Enter IP address of the Gateway
-
- ![PuTTY Configuration](images/03020-PuTTYConfiguration.png)
-
- - If you are presented with a "**PuTTY Security Alert**", click "**Yes**" to continue:
-
- ![PuTTY Security Alert](images/03030-PuTTYSecurityAlert.png)
-
- - You can login with:
- - User Name: **root**
- - Password: **root**.
-
- ![Login as root](images/03040-LoginAsRoot.png)
-
-1. For Mac OSx and Linux users
-
- - Open a terminal Window
- - At the prompt, type the follwing command. Replace `your.nucs.ip.address` with the IP Address of your IoT Gateway:
-
- `ssh root@your.nucs.ip.address`
-
- - Enter ***root*** as the password
-
+1. Once the IP Address has been displayed, wait another two minutes and then open your browser and go to the IP Address from the previous step (`http://your.nucs.ip.address:1880` where `your.nucs.ip.address` is the IP Address from above).
___
@@ -203,14 +101,7 @@ In this exercise, you will use the Node-RED development environment pre-installe
1. To open the Node-RED development environment on your IoT Gateway:
- - In your browser, navigate to `http://your.nucs.ip.address` where `your.nucs.ip.address is` your gateway's IP Address.
- - Click the "**Administration**" link
- - Click the "**Launch**" button under the "**Node-RED**" icon.
-
- ![Launch Node-RED](images/04010-LaunchNodeRed.png)
-
- > **Note**: Accessing the Node-RED environment from the Administration page leaves the IoT Gateway links, etc. still visible at the top of the page and can cause some difficulty with the Node-RED environment. If you prefer to access the Node-RED environment directly, you can do so by navigating to port **1880** on your gateway using **`http://your.nucs.ip.address:1880`** . Again, replace **`your.nucs.ip.address`** with your gateway's IP Address.
-
+  - In your browser, navigate to `http://your.nucs.ip.address:1880` where `your.nucs.ip.address` is your gateway's IP Address.
1. The Node-RED environment will show the default "**Flow 1**" workflow that is responsible for retrieving your gateway's IP Address and displaying it on the LCD panel as well as reading the value from the rotary angle sensor, changing the background color of the RGB LCD based on the sensor value, and displaying it in the charts on the IoT Gateway web portal. Leave this flow as is for now.
![Node-RED Environment](images/04020-NodeRedEnvironment.png)
@@ -274,7 +165,7 @@ We will create Node-RED flow to get data from temperature sensor and display it
1. From the "**UPM_Sensors**" group in the nodes panel, drag a "**Grove Temperature Sensor**" into the visual editor for "**Flow 3**". Then double click on the new node, and set the properties as follows. Click the "**OK**" button to save the changes:
- > **Note**: The interval of 10000ms really means the temperature will only update once everyt 10 seconds. You can speed that up by lowering the number to 5000ms or even 1000ms, but later when we are publishing data to Azure it might be better to publish less frequently just to help reduce network traffic and resource utilization in Azure.
+ > **Note**: The interval of 10000ms really means the temperature will only update once every 10 seconds. You can speed that up by lowering the number to 5000ms or even 1000ms, but later when we are publishing data to Azure it might be better to publish less frequently just to help reduce network traffic and resource utilization in Azure.
- Name - **Leave Blank**
- Platform - **Firmata**
@@ -431,8 +322,6 @@ That means that we need to select a region that supports all of the services we
- Azure Storage
- Azure SQL Database
- Azure Web Apps
-- Azure Function Apps
-- Azure PowerBI Embedded
At the time this is being written (October 2016), the following regions have all of the required services. **THIS LIST WILL GROW OVER TIME**. You are welcome to review the Products available by region to see if any additional regions provide the resources needed. Otherwise, simply pick the region from the list below that is closest to you, and ensure that you choose that region for each resource you deploy.
@@ -473,11 +362,6 @@ The following table is a summary of the Azure services you will create in the la
| Storage Account | ***<name>storage*** | A few of the services require a storage account for their own purposes. This account exists purely as a resource for those services. We won't use it directly for our own purposes. |
| App Service Plan | ***<name>plan*** | The App Service plan provides the execution environment (servers) for our Web App and Function App. We can scale our App Service Plan up or down as needed to get give those services the resources they require to perform as desired. |
| Web App | ***<name>web*** | The Azure Web App is where we will deploy our Node.js application that provides the web site for our solution. We can then go to this site to view temperatures from our devices queried from the SQL Database |
-| Function App | ***<name>functions*** | The Azure Function App contains the ***TempAlert*** function. A single Function App can contain many functions. We'll just have one. |
-| Function | ***TempAlert*** | The ***TempAlert*** function will be triggered automatically whenever a new message is sent to our ***<name>alerts*** event hub. It will then read those messages, retrieve the id of the device it was sent from, and then send a message through the IoT Hub back to that device to let it know that its temperature has exceeded acceptible levels. The device can then sound an alarm by turning on its buzzer. |
-| Power BI Embedded Workspace Collection | ***<name>collection*** | Power BI Embedded Collections are what you configure in Azure to host one or more Power BI Embedded Workspaces. |
-| Power BI Embedded Workspace | ***system generated guid*** | The Power BI Embedded Workspace is where we can upload one or more reports. |
-| Power BI Embedded Report | ***TemperatureChart*** | The ***TemperatureChart*** report is a pre-built report that displays device and temperature data from the ***<name>db*** Azure SQL Database. It is provided as the ***TemperatureChart.pbix*** Power BI Desktop file in the lab files. We'll upload this report into our Power BI Embedded Workspace and then embed it in the UI of our Web Application. Users viewing the web application in their browser can then see that report. |
### Documenting Your Choices ###
@@ -585,7 +469,7 @@ In this task, we'll create the ***<name>iot*** Azure IoT Hub and since it'
`streamanalytics`
- - Click the "**Save**" button at the top of the "**Properties**" blade to save the new consumer group, the close the "**Properties**" blade.
+ - Click the "**Save**" button at the top of the "**Properties**" blade to save the new consumer group, and close the "**Properties**" blade.
![Create streamanalytics Consumer Group](images/07100-StreamAnalyticsConsumerGroup.png)
@@ -597,238 +481,40 @@ Creating an Azure IoT Hub Device Identity
Now that we have our Azure IoT Hub created, we want to create an entry in the hub's device identity registry. A "device identity" in the IoT Hub's device identity registry is basically a unique id and access key that can be used by the actual device in the field (The Intel NUC and with Arduino 101 in our case) to connect to the IoT Hub. The connection string for the device entry in the registry will be used by the actual device to securely connect to the IoT Hub and send and receive messages as that device. You can learn more about Azure IoT Hub devices in the "**Identity registry**" article online.
-At the time this is being written, the Azure Portal does not allow you to provision device identities in the registry, although you can view existing ones. In order to create our device identity, we will use a node.js command line interface for working with your Azure IoT Hubs called "**iothub-explorer**"
-
There is a graphical tool for Windows called "**Device Explorer**". We won't document its use here in this lab, but if you are on Windows and wish to try it can you can download it from here github.com/Azure/azure-iot-sdks/releases/latest (look for the first "**SetupDeviceExplorer.msi**" link) and learn more about it here: How to use Device Explorer for IoT Hub devices.
-1. This task requires that you have Node.js 4.x or later installed. If you don't have it installed already, you can install it from **nodejs.org**. Make sure that Node is added to the path so you can access it from anywhere on the command line.
+1. Go to the **Azure Portal**.
-1. Open a command prompt, or terminal window, **on your development PC** (not on the NUC) and install the "iothub-explorer" npm package globally as follows:
+1. Find and click "**Device Explorer**" in the EXPLORERS menu of the Azure IoT Hub.
- > **Note**: **MAKE SURE TO USE THE -g OPTION TO INSTALL THE PACKAGE GLOBALLY**
+ ![Add Device Identity from portal](CreateIoTDeviceID/iotdevid_1.png)
- ```text
- npm install -g iothub-explorer
- ```
+1. Click the **+ Add** button at the top:
- > **Note**: **IF YOU'RE ON A MAC OR LINUX PC, YOU NEED TO USE 'SUDO' TO GAIN ADMIN PRIVILEGES FOR THE COMMAND ABOVE**
+ ![Add Device Identity from portal](CreateIoTDeviceID/iotdevid_2.png)
-1. You should see output similar to the following:
+1. Now name the device, leave the options as default, and click "**SAVE**" at the bottom.
- > **Note**: Your output may look different if you are running a newer version of node, or as the iothub-explorer packages is versioned.
+ ![Add Device Identity from portal](CreateIoTDeviceID/iotdevid_3.png)
+
+1. When it is added successfully, you can see the name of the device on the portal.
- ```text
- C:\Users\iotde\AppData\Roaming\npm\iothub-explorer -> C:\Users\iotde\AppData\Roaming\npm\node_modules\iothub-explorer\iothub-explorer.js
- iothub-explorer@1.0.14 C:\Users\iotde\AppData\Roaming\npm\node_modules\iothub-explorer
- ├── uuid@2.0.3
- ├── nopt@3.0.6 (abbrev@1.0.9)
- ├── colors-tmpl@1.0.0 (colors@1.0.3)
- ├── prettyjson@1.1.3 (minimist@1.2.0, colors@1.1.2)
- ├── bluebird@3.4.6
- ├── azure-iot-common@1.0.15 (crypto@0.0.3)
- ├── azure-event-hubs@0.0.3 (amqp10@3.2.2)
- ├── azure-iothub@1.0.17 (azure-iot-http-base@1.0.16, azure-iot-amqp-base@1.0.16)
- └── azure-iot-device@1.0.15 (azure-iot-http-base@1.0.16, debug@2.2.0, azure-storage@1.3.1)
- ```
+ ![Add Device Identity from portal](CreateIoTDeviceID/iotdevid_4.png)
-1. Now that we have iothub-explorer installed, we can use it to interact with our Azure IoT Hub. At your command window or terminal prompt, enter:
+1. If you click the name of the device, you can see detailed information such as connection strings.
- ```text
- iothub-explorer
- ```
- It will display its usage details:
-
- ```text
- Usage: iothub-explorer [options] [command-options] [command-args]
-
-
- Commands:
-
- login start a session on your IoT hub
- logout terminate the current session on your IoT hub
- list list the device identities currently in your IoT hub device registry
- create create a device identity in your IoT hub device registry
- delete delete a device identity from your IoT hub device registry
- get get a device identity from your IoT hub device registry
- import-devices import device identities in bulk: local file -> Azure blob storage -> IoT hub
- export-devices export device identities in bulk: IoT hub -> Azure blob storage -> local file
- send send a message to the device (cloud-to-device/C2D)
- monitor-feedback monitor feedback sent by devices to acknowledge cloud-to-device (C2D) messages
- monitor-events [device-id] listen to events coming from devices (or one in particular)
- monitor-uploads monitor the file upload notifications endpoint
- monitor-ops listen to the operations monitoring endpoint of your IoT hub instance
- sas-token generate a SAS Token for the given device
- simulate-device simulate a device with the specified id
- get-twin get the twin of a device
- update-twin update the twin of a device and return it.
- query-twin Gets the twin of a device
- query-job [job-type] [job-status] Gets the twin of a device
- device-method [method-payload] [timeout-in-seconds] Gets the twin of a device
- help [cmd] display help for [cmd]
-
- Options:
-
- -h, --help output usage information
- -V, --version output the version number
- ```
-1. Note the `iothub-explorer login` option. This allows you to enter your IoT Hub connection string once, and not have to re-supply the connection string for every command during the "session". The "session" lasts for one hour by default. To login, we'll need the "iothubowner" SAS policy connection string we copied int the "**[myresources.txt](./myresources.txt)**" file previously. Retrieve that string from the file, and use it to login to your Azure IoT Hub with iothub-explorer as follows:
-
- ```text
- iothub-explorer login ""
- ```
- In the example below, note the "**`SharedAccessKeyName=iothubowner`**" portion of the connection string. That's how you know it's the "**`iothubowner`**" connection string:
-
- ```text
- iothub-explorer login "HostName=mic16iot.azure-devices.net;SharedAccessKeyName=iothubowner;SharedAccessKey=MuIeI2Bpp4lm6knbNiXX4J1V+UivTov/ebfIfykWD+g="
- ```
-
- You should see details about your login session returned. Something similar to this:
-
- ```text
- Session started, expires on Wed Dec 14 2016 10:40:28 GMT-0800 (Pacific Standard Time)
- Session file: C:\Users\IoTDev\AppData\Local\iothub-explorer\config
- ```
+ ![Add Device Identity from portal](CreateIoTDeviceID/iotdevid_5.png)
-1. Next, we need to determine the id we will use for the device identity. We will use the same naming convention for the other resources to create a device identity with the following id:
-
- ***<name>IntelIoTGateway***
-
- > **Note**: In a real-world production scenario this id would more likely be a guid, or some kind of value that supported uniqueness across a large number of devices. But to help the id be understandable in the lab, we are using a more human readable string for the id.
-
-1. Create the new device identity using the "**iothub-explorer create**" command. The "**--connection-string**" option at the end asks the utility to return the primary connection string for the device to use to connect to the Azure IoT Hub:
-
- ```text
- iothub-explorer create IntelIoTGateway --connection-string
- ```
- For example:
-
- ```text
- iothub-explorer create mic16IntelIoTGateway --connection-string
- ```
- With this result:
-
- ```text
- Created device mic16IntelIoTGateway
-
- -
- deviceId: mic16IntelIoTGateway
- generationId: 636116504595463314
- etag: MA==
- connectionState: Disconnected
- status: enabled
- statusReason: null
- connectionStateUpdatedTime: 0001-01-01T00:00:00
- statusUpdatedTime: 0001-01-01T00:00:00
- lastActivityTime: 0001-01-01T00:00:00
- cloudToDeviceMessageCount: 0
- authentication:
- SymmetricKey:
- secondaryKey: qb3RG2SjfQ+tz8jZOK/xBPqP9F0K+riha0i5KJNcWdg=
- primaryKey: q9D0X2vXNsQ5LET3TlXx+FpHZ1SP6pQ9+69+hudCIZk=
- x509Thumbprint:
- primaryThumbprint: null
- secondaryThumbprint: null
- -
- connectionString: HostName=mic16iot.azure-devices.net;DeviceId=mic16IntelIoTGateway;SharedAccessKey=q9D0X2vXNsQ5LET3TlXx+FpHZ1SP6pQ9+69+hudCIZk=
- ```
1. Copy the connection string for the new device from the command output, and document it along with your device id in the "**[myresources.txt](./myresources.txt)**" file:
![Document Device Identity](images/08030-DocumentDeviceIdentity.png)
-
-1. If needed, you can use the iothub-explorer to list the existing device identities along with their connection strings as follows:
-
- ```text
- iothub-explorer list --connection-string
- ```
-
-1. Or, you can retrieve the details (including the connection string) of a specific device with:
-
- ```text
- iothub-explorer get --connection-string
- ```
___
Publishing Temperature Sensor Data to the Azure IoT Hub
---
-In this task, we'll update the Intel NUC with some packages to help it talk to our Azure IoT Hub, then we'll modify the "**Flow 3**" flow we created earlier to publish the temperature sensor data to our Azure IoT Hub.
-
-1. Open an ssh connection to your Intel NUC using the method desribed previously. From the prompt, run the following commands:
-
- > **Note**: ***MAKE SURE YOU ARE RUNNING THESE COMMANDS ON THE INTEL NUC VIA AN SSH SESSION.***
-
- > **Note**: If you are using PuTTY, you can copy the command below to your computer's clipboard, then right-click on the PuTTY window to paste it into the remote ssh command prompt. Other ssh clients should offer a similar paste option.
-
- The commands will not return any result unless there was a problem. They are simply adding some public key values used by the apt-get package repositories we will be installing from in the coming steps:
-
- ```text
- rpm --import http://iotdk.intel.com/misc/iot_pub.key
- rpm --import http://iotdk.intel.com/misc/iot_pub2.key
- ```
-
-1. Next, install the `node-red-contrib-os` npm package globally on the NUC. This package (link) adds some special Node-RED nodes that allow you to get information from the local Operating System (OS) where the Node-RED flow is running. Things like OS Info, Drive Info, Memory Info, etc. Install it on the NUC using the following statement:
-
- You will see a number of "***WARN unmet dependency***" messages appear. ***You can safely ignore these***.
-
- ```text
- npm install node-red-contrib-os -g
- ```
-
- ***Keep your ssh connection open, you'll need it later.***
-
-1. Next, we need to add an rpm package repository to the system. In your browser, navigate to your NUC's IP Address and login as root. Then navigate to the "**Packages"** page, and click the "**Add Repo +**" button.
-
- ![Add Repo](images/09010-AddRepo.png)
-
-1. In the "**Manage Repositories**" window, in the fields under "**Add New Repository**" enter the following, and click the "**Add Repository**" button:
-
- > **Note**: The `IoT_Cloud` repo is provided by Intel and includes some packages for working with Azure's IoT services. Once we add a reference to the repo (which is what we are doing here), we can later install packages from it using apt-get.
-
- - Name - **IoT_Cloud**
- - URL - **`http://iotdk.intel.com/repos/iot-cloud/wrlinux7/rcpl13`**
- - Username - **leave blank**
- - Password - **leave blank**
-
- ![Add IoT_Coud Repository](images/09020-AddIoTCloudRepo.png)
-
-1. You will see the following message, indicating that this update may take a few minutes. And it does, so be patient:
-
- "***Adding repository IoT_Cloud. Package list will be updated. This may take a few minutes...***"
-
-1. Once the update is complete, you should see the following message. Click on the "Update Repositories" button. Again, this will take a few mintues to complete:
-
- ![Update Repositories](images/09030-UpdateRepositories.png)
-
-1. When the button states that the repositories have been updated (this will also take a minute or so to update), you can click on the "X" in the top right corner of the "**Manage Repositories**" window to close it:
-
- ![Close Manage Repositories Window](images/09040-CloseManageRepositoriesWindow.png)
-
-1. Next, click the "**Add Packages +**" button:
-
- ![Add Packages](images/09050-AddPackagesButton.png)
-
-1. In the "**Add New Packages**" window, in the search box, type "**cloud-azure**", then click the "**Install**" button next to the "**packagegroup-cloud-azure**" package. Again, this takes a few minutes so be patient:
-
-
- Note: You can see what all is installed with the packagegroup-cloud-azure package here: link
-
- Basically it is all of the npm packages for the various Azure IoT Hub sdks. It also includes a Node-RED node for working with Azure IoT Hubs (link).
-
-
- ![Install packagegroup-cloud-azure](images/09060-InstallPackageGroupCloudAzure.png)
-
-1. Once the package disappears from the list, you can click on the "X" icon to close the "**Add New Packages**" window.
-
- ![Close the Add New Packages Window](images/09070-CloseAddNewPackagesWindow.png)
-
-1. ***Back in your ssh connection to the NUC***, run the following command to restart the Node-RED environment on the NUC. This is necessary because the package that we just installed updated the resources available to Node-RED so it needs to be re-initialized:
-
- ```text
- systemctl restart node-red-experience
- ```
-
-1. Now, ***from your computer*** open the Node-RED development environment in the browser (Remember you can just point your browser to port 1880 on your NUC, eg: `http://your.nucs.ip.address:1880` where `your.nucs.ip.address is` your NUC's IP Address). If you already had it open, make sure to refresh it. In the list of nodes on the left, you should see a new "**cloud**" category, and within it the "**azureiothub**" node:
+1. ***From your computer***, open the Node-RED development environment in the browser (Remember you can just point your browser to port 1880 on your NUC, eg: `http://your.nucs.ip.address:1880` where `your.nucs.ip.address` is your NUC's IP Address). If you already had it open, make sure to refresh it. In the list of nodes on the left, you should see a new "**cloud**" category, and within it the "**azureiothub**" node:
Note: if the "cloud" category and "azureiothubnode" node don't appear, you may need to manually install the "node-red-control-azureiothubnode" package on the NUC. If that is necessary, ssh into the NUC, and from the prompt run the following two commands:
@@ -847,12 +533,12 @@ In this task, we'll update the Intel NUC with some packages to help it talk to o
1. Drag the "**azureiothub**" node onto the Node-RED visual editor, connect it to the "**json**" node's output as shown below, configure it as follows, and click "**OK**"
- Name - "**Azure IoT Hub**"
- - Protocol - "**amqp**"
+ - Protocol - "**HTTP**"
- Connection String - **Paste in the "IoT Hub Device Connection String" you just copied from "[myresources.txt](./myresources.txt)"**. Again, make sure it's the connection string with that contains your "**`DeviceId=IntelIoTGateway`**" device id we created earlier.
![Add azureiothub Node to the Flow](images/09090-AddAzureIoTHubNode.png)
-1. Next, double click on the "**Create Payload**" node. This function generates the actual JSON message that will be sent to the Azure IoT Hub. We will want to be able to retrieve the actual device id from that payload later, so we want to update it to use the device ID we created in our Azure IoT Hub Device Identity registry previously.
+1. Next, double click on the "**Create Payload**" node. This function generates the actual JSON message that will be sent to the Azure IoT Hub. We will want to be able to retrieve the actual device id from that payload later, so we want to update it to use the device ID we created in our Azure IoT Hub Device Identity registry previously.
- Replace the default "**IntelIoTGateway**" name with the "***<name>IntelIoTGateway***" you created (It should match the "**`DeviceId=IntelIoTGateway`**" device id value in the connection string you used above, then click "**OK**"
@@ -862,63 +548,8 @@ In this task, we'll update the Intel NUC with some packages to help it talk to o
![Deploy the Changes](images/09120-DeployChanges.png)
-1. Now the that device is publishing messages to the IoT Hub, we want to verify that by reading the messages back. From the command prompt or terminal window ***on your system***, run the following command to monitor the messages being sent into your Azure IoT Hub the Node-RED flow running on the NUC:
-
- - You will need to copy the '**IoT Hub "iothubowner" SAS Policy Primary Connection String**' from the "**[myresources.txt](./myresources.txt)**" file.
-
- > **Note**: Again, you need to pay attention here. Make sure to copy the '**IoT Hub "iothubowner" SAS Policy Primary Connection String**' value. It's the one that has "**`SharedAccessKeyName=iothubowner`**" in the connection string. This connection string allows you to connect to your IoT Hub with permissions to manage the hub. That includes of course the permission to read messages that devices send to the hub.
-
- - Use the device id you generated in place of the ***<name>IntelIoTGateway*** device id
-
- ```text
- iothub-explorer monitor-events IntelIoTGateway --login ""
- ```
-
- For example:
-
- ```text
- iothub-explorer monitor-events mic16IntelIoTGateway --login "HostName=mic16iot.azure-devices.net;SharedAccessKeyName=iothubowner;SharedAccessKey=MuIeI2Bpp4lm6knbNiXX4J1V+UivTov/ebfIfykWD+g="
- ```
-
- And you should see output similar to this:
-
- ```text
- Monitoring events from device mic16IntelIoTGateway
- ==== From: mic16IntelIoTGateway ====
- {
- "deviceID": "mic16IntelIoTGateway",
- "timestamp": "2016-10-10T03:49:48.966Z",
- "temperature": 36.959999999999994
- }
- ====================
- ==== From: mic16IntelIoTGateway ====
- {
- "deviceID": "mic16IntelIoTGateway",
- "timestamp": "2016-10-10T03:49:59.006Z",
- "temperature": 37.62
- }
- ====================
- ==== From: mic16IntelIoTGateway ====
- {
- "deviceID": "mic16IntelIoTGateway",
- "timestamp": "2016-10-10T03:50:09.085Z",
- "temperature": 36.959999999999994
- }
- ====================
- ```
-
1. Remember that we had the Node-RED flow only getting temperatue values once every 10 seconds (10000ms). It is recommended that you don't publish too much more frequently during this event. It just helps to reduce the amount of traffic on the network.
-1. If you are feeling adventurous, trade iothubowner connection strings and device IDs with a neighbor in the lab and verify that you can monitor each other's devices. For example:
-
- ```bash
- iothub-explorer monitor-events --login ""
- ```
-
-1. One last comment, we are using the "**iothubowner**" connection string to monitor the events. You could actually use a less privileged policy, like the "**service**" sas policy we copied the connection string for earlier. Go ahead and try monitoring events with the **IoT Hub "service" SAS Policy Primary Connection String** policy connection string (the one with "**`SharedAccessKeyName=service`**" in it) you pasted into the [myresources.txt](./myresources.txt) file. It should work just fine because that SAS policy has permissions to read messages from the IoT Hub and that is all the permissions that `iothub-explorer monitor-events` needs.
-
-1. To stop monitoring events, press **Ctrl-C** at the command prompt and confirm exiting the script.
-
___
@@ -968,7 +599,7 @@ We'll start out creating the ***<name>sql*** Azure SQL Server, and the ***
1. Click "**Pricing tier**", then find and select the "**B Basic**" pricing tier, and click the "**Select**" button to select it.
- ![Basic Pricing Tier](images/10040-SQLDBPricingTier.png)
+ ![Basic Pricing Tier](images/CreateSQLDB_1.png)
1. Finally, we can create the new Azure SQL Database and Server. Ensure that the "**Pin to dashboard**" checkbox is **checked**, and click the "**Create**" button to create them.
@@ -996,129 +627,36 @@ We'll start out creating the ***<name>sql*** Azure SQL Server, and the ***
![Document SQL](images/10085-DocumentSQL.png)
-1. Now that we have the database created, we need to create the database objects inside it. To do that, we'll use Visual Studio Code, and the "**mssql**" extension. Ensure "**Visual Studio Code**" is open to the "**HOLs/**" as instructed previously.
-
- ![HOLs Folder Open in VS Code](images/10090-HOLsFolderInCode.png)
-
-1. Then click the icon to open the "**Extensions**" panel, and in the search box at the top type "**mssql**", and in the search results, click the "**Install**" button for the "**mssql**" extension.
-
- > **Note**: Extensions provide a powerful way to expand the capabilities of Visual Studio Code. There is a rich ecosystem of extensions developed by Microsoft as well as a worldwide community of developers that you can use in "**Visual Studio Code**". The "**mssql**" extension we are installing here allows you to connect to your "**Azure SQL Database**" from with "**Visual Studio Code**" and execute SQL statements.
-
- ![Install the mssql Extension](images/10100-InstallVsCodeMssql.png)
-
-1. Once installed, click the "**Reload**" button to enable the extension, and when prompted click "**Reload Window**" to allow VS Code to restart:
-
- ![Enable Extension](images/10110-EnableExtension.png)
-
- ![Confirm Restart](images/10120-ConfirmVSCodeRestart.png)
-
-1. Next, we need to tell the **mssql** extension how to connect to our Azure SQL Server and Database. To do so, from the menu bar, select "**File**" | "**Preferences**" | "**Settings**"
-
- ![Open Workspace Settings](images/10130-OpenWorkspaceSettings.png)
-
-1. Then, in the **settings.json** file, click the "**Workspace Settings**" header. Locate the connection properties for the SQL Server connection in the file:
-
- ![Connection Properties](images/10140-SQLConnectionProperties.png)
-
- And replace them with the appropriate values from your "**[myresources.txt](./myresources.txt)**" file. For example:
-
- ![Completed SQL Connection Properties](images/10145-CompletedSQLConnectionProperties.png)
-
- Save and close the **settings.json** file when you are done.
-
-1. Click on the icon for the "**Explorer**" panel, select the "**SQL Database Scripts\Create SQL Database Objects.sql**" file. If this is the first "**.sql**" file you have opened since installing the "**mssql**" extension, you may see some the "**OUTPUT**" panel appear to show the output of the sql tools initialization. You may also be prompted to view the release notes, if so you can just click "**Close**":
-
- ![SQL Tools Initialization](images/10147-SQLToolsInitialization.png)
-
-1. On Windows, you may geta firewall prompt. Make sure to confirm the firewall prompt, and to enable it on all network types:
-
- ![Windows Firewall Prompt](images/10148-FirewallPrompt.png)
-
-1. The "**Create SQL Database Objects.sql**" script creates the following objects:
+1. Click on the icon for the "**Explorer**" panel, select the "**SQL Database Scripts\Create SQL Database Objects.sql**" file. Copy **all** queries from here.
+ ![SQL Tools Initialization](images/10147-SQLToolsInitialization.png)
+
+The "**Create SQL Database Objects.sql**" script creates the following objects:
- The "**dbo.Measurement**" table. This table is structured to match the data being sent by the Node-RED flow Intel NUC. It has the following columns:
- "**MeasurementID**" is a dynamically generated ID for each row in the database.
- "**deviceID**" is a nvarchar field that will store the device id sent by the device.
- "**timestamp**" is a datetime field that will store the "**timestamp** generated on the Intel NUC when the message was created.
- "**temperature**" is a float column that will store the temperature sensor values.
- The "**dbo.RecentMeasurements**" view is used by the web application to display the 20 most recent messages.
- - The "**dbo.Devices**" view is used by the web application to display a row for each device, along with their latest reading
-
- ![Open SQL Script](images/10150-OpenCreateScript.png)
-
-1. Make sure the "**Create SQL Database Objects.sql**" file is the currently active file in VS Code, then press the "**F1**" key (or **Ctrl+Shift+P**) to open the "**Command Palette**", and in the box, type "**>MS SQL: Connect**" (don't forget the **>**) and press "**Enter**" (Note you could also have used the **Ctrl-Shift-C** keyboard shortcut while the SQL script file was active).
-
- ![MS SQL Connect](images/10155-MSSQLConnect.png)
-
- ![Select Connection](images/10160-SelectConnection.png)
-
-1. If you receive a connection error, verify the entries in the settings.json file, and also ensure that you created the firewall rule on the server to allow all connections.
-
-1. Once you have connected successfully, the server name will show up in the VS Code status bar:
-
- ![Successful Connection](images/10180-SuccessfulConnection.png)
-
-1. Finally, again ensure that the "**Create SQL Database Objects.sql**" file is the active file in VS Code. Press "**F1**" or "**Ctrl+Shift+P**" to open the "**Command Palette**" and enter "**>MSSQL: Run T-SQL query**" to execute the code in the current file (You can also just press "**Ctrl-Shift-E**" to execute the code in the currently active SQL script).
-
- ![Run the SQL Script](images/10190-RunSQLScript.png)
-
-1. The "**Results: Create SQL Database Objects.sql**" tab will open, display the results of the script exectution. You can close the "**Results: Create SQL Database Objects.sql**" and "**Create SQL Database Objects.sql**" tabs when you are done.
-
- ![SQL Results](images/10210-SQLResults.png)
-
-### Create the Event Hub ###
-
-Next up is the ***<name>alerts*** Event Hub that the ***<name>job*** Stream Analytics Job will forward messages with high temperature readings off to.
-
-1. With your browser open to the **Azure Portal** (https://portal.azure.com), close any blades that may be left open from previous steps.
-
-1. Click "**+ New**" | "**Internet of Things**" | "**Event Hubs**"
-
- > **Note**: the name is a bit misleading. Before we can create an Event Hub, we need to create the "**Service bus namespace**" that will host it. We are actually choosing to create an Event Hubs compatible Service Bus Namespace.
-
- ![New Event Hub](images/10220-NewEventHub.png)
-
-1. In the "**Create namespace**" blade that opens, complete the properties as follows, then click the "**Create**" button to create the Service Bus Namespace that will hold our Event Hub:
-
- - Name - ***<name>ns***
- - Pricing tier - Select the "**Standard**" tier
- - Subscription - **Chose the same subscription used for the previous resources**
- - Resource group - Choose "**Use existing**" and select the ***<name>group*** resource group created previously
- - Location - **Use the same location as the previous resources**
- - Pin to dashboard - **Checked**
-
- ![Create Namespace](images/10230-CreateNamespace.png)
+ - The "**dbo.Devices**" view is used by the web application to display a row for each device, along with their latest reading
+
+1. Go to the **Azure Portal**, and select the SQL DB management blade.
-1. Within a few minutes, the namespace should be provisioned, with its property blade opened in the portal. Click the "**+Event Hub**" button along the top to create the actual event hub:
+1. Select **Tool** at the top.
- ![Create Event Hub](images/10240-CreateEventHub.png)
+ ![MS SQL Connect](images/CreateSQLDB_2.png)
-1. In the "**Create Event Hub**" blade complete the properties as follows, then click the "**Create**" button to create the event hub.
+1. Select **Query editor (preview)**.
- - Name - ***<name>alerts***
- - Partition Count - **2**
- - Message Retention - **1**
- - Archive - **Off**
+ ![MS SQL Connect](images/CreateSQLDB_2.1.png)
- ![Create New Event Hub](images/10250-NewEventHubProperties.png)
+1. Login using SQL Server authentication type.
-1. Wait until the new event hub is created successfully before continuing:
+ ![MS SQL Connect](images/CreateSQLDB_3.png)
- ![Event Hub Created](images/10260-EventHubCreated.png)
+1. Paste copied queries and run.
-1. Click the "**Shared access policies**" link along the left hand side, and then click on the "**RootManageSharedAccessKey**"
-
- ![SAS Policies](images/10262-SASPolicies.png)
-
-1. Then on the "**Policy: RootManageSharedAccessKey**" page, click the button to copy the primary connection string to the clipboard:
-
- ![Copy the RootManageSharedAccess Key](images/10263-RootManagedSharedAccessKey.png)
-
-1. Take a minute to document your Event Hub Namespace and Event Hub names:
-
- ![Document Event Hub](images/10265-DocumentEventHub.png)
-
-1. Close all the open blades.
+ ![MS SQL Connect](images/CreateSQLDB_4.png)
### Create the Stream Analytics Job ###
@@ -1178,48 +716,28 @@ Great, now we have all the pieces that the ***<name>job*** Stream Analytic
![Successful Connection](images/10330-SuccessfulConnectionTest.png)
-1. Again, press the "**+ Add**" button at the top of the "**Outputs**" blade, then complete the "**New output**" blade properties as follows and click "**Create**":
-
- - Name - **alerts** (Again, you really should just use this name and not change it)
- - Sink - **Event hub**
- - Subscription - **Use event hub from current subscription**
- - Service bus namespace - ***<name>ns***
- - Event hub name - ***<name>alerts***
- - Event hub policy name - **RootManageSharedAccessKey**
- - Partition key column - **0**
- - Event serialization format - **JSON**
- - Encoding - **UTF-8**
- - Format - **Line separated**
-
- ![Alerts Output Properties](images/10340-AlertsOuputProperties.png)
-
-1. Verify that the test connection the **alerts** output was successful:
-
- ![Alerts Test Success](images/10350-SuccessfulAlertsTest.png)
-
1. Close the "**Outputs**" blade, and then back on the job blade, click the "**Query**" button.
![Query](images/10360-Queries.png)
-1. Back in Visual Studio Code with the "**HOLs**" folder open, locate open the "**Stream Analytics\Stream Analytics Query.sql**" file and copy its contents to the clipboard:
+1. Back in Visual Studio Code with the "**HOLs**" folder open, locate and open the "**Stream Analytics\Stream Analytics Query.sql**" file and copy its contents to the clipboard, **OR** copy the query from here.
> **Note**: Stream Analytics uses a SQL like syntax for its queries. It provides a very powerful way to query the data streaming through the Azure IoT Hub as if it were data in a table. Pretty cool!
- ![Copy ASA Query](images/10370-CopyASAQuery.png)
-
+ ```
+ SELECT deviceID, [timestamp] as [timestamp], temperature
+ INTO sqldb
+ FROM iothub
+ ```
1. Back in the browser, replace the default syntax in the query with the code you just copied, then click the "**Save**" button along the top.
- <Note: There are actually two queries here.
+ <Note: There is actually only one query here.
- - The first one queries all of the messages from the "iothub" intput and dumps them into the "sqldb" output.
- - The second query looks for messages coming in from the "iothub" input only where the "temperature" value is greater than 40 and then sends those messages into the "alerts" output.
+ - The query takes all of the messages from the "iothub" input and dumps them into the "sqldb" output.
- You may decide to change the 40 threshold value to something more appropriate for the temperatures your sensor is reporting. You want something that is higher than the average value, but low enough that you can reach it by forcefully heating up your temp sensor.
- ![Create Query](images/10380-CreateQuery.png)
-
1. Close the "**Query**" blade, and back on the job blade, click the "**Error Policy** link along the left. Change the policy to "**Drop**" and click the "**Save**" button along the top:
![Drop Errors](images/10385-DropErrors.png)
@@ -1236,14 +754,13 @@ Great, now we have all the pieces that the ***<name>job*** Stream Analytic
![Job Started](images/10400-JobStarted.png)
-1. At this point, we should have data being forwarded into our SQL Database. We can verify that using Visual Studio Code. In Code, open the "**SQL Database Scripts\Query Measurement Data.sql**" file. Make sure the file is active by clicking into it, and then execute the entire query by pressing **Ctrl+Shift+E** (If prompted, select your SQL Server connection again). You should see the results of the three queries show up on the "**Results**" tab.
-
- > **Note**: You need to be connected to the SQL Server already. If you aren't, press **Ctrl+Shift+C**, select your SQL Connection and press enter. Also make sure that your cursor is INSIDE the "**Query Measurement Data.sql**" file in the editor or the **Ctrl+Shift+C** and **Ctrl+Shift+E** do different things (open a command prompt, and open the explorer panel respectively).
-
- ![SQL Data](images/10410-SQLData.png)
-
-1. Pretty cool! Our device message data is now getting pushed into our Azure SQL Database. Of course, we also have the "**alerts**" output which can push high temperature messages off to our ***<name>alerts*** event hub, but we'll come back to those in another task. For now, pat yourself on the back, this task had a lot of steps.
+1. At this point, we should have data being forwarded into our SQL Database. We can verify that using the Azure Portal **Query Editor (preview)**. In Code, open the "**SQL Database Scripts\Query Measurement Data.sql**" file and copy the queries. Go to the Azure portal and open the SQL DB management blade, or copy the queries from here.
+ ![Job Started](images/CreateSQLDB_2.png)
+
+ ![Job Started](images/CreateSQLDB_2.1.png)
+
+ ![Job Started](images/CreateSQLDB_3.png)
___
@@ -1381,7 +898,7 @@ The reason for this is that we are going to be using the Node.js Debugging and G
### Deploying the Web App to Azure ###
-The last step is to get this web application running in Azure, not locally. Earlier, when we configured the Web Application we set it up to deploy from a git repository. To deploy we can simply push our web app code via git up to the Web App's repo. Visual Studio Code makes that simple to do!.
+The last step is to get this web application running in Azure, not locally. Earlier, when we configured the Web Application we set it up to deploy from a git repository. To deploy we can simply push our web app code via git up to the Web App's repo. Visual Studio Code makes that simple to do!
1. In Visual Studio Code, click the "**git**" icon to open the "**git**" panel, and then click the "**Initialize git repository**" button.
@@ -1495,507 +1012,6 @@ We'll configure our Intel NUC to turn on its buzzer for one second if it receive
![Test Buzzer Button on Web Site](images/12050-TestBuzzerOnWebsite.png)
![Buzzer Test Message](images/12060-BuzzerTestMessageOnLcd.png)
-
-### Create an Azure Function to Process the Event Hub Messages and Alert the Device ###
-
-Previously, we setup our ***<name>job*** Stream Analytics Job to forward messages coming in from the IoT Hub that had a temperature value over some threshold off to the ***<name>alerts*** event hub. But we never did anything with those Event Hub alert messages. Now we will.
-
-In this task, we'll create the **TempAlert** function and have it receive those alert messages from the event hub, and send a temp alert message back down to the device.
-
-1. Open the **Azure Portal** (https://portal.azure.com) in the browser, and close any blades open from previous steps. Then click "**+ New**" | "**Compute**" | "**Function App**":
-
- > **Note**: Function apps may appear in your portal under the **Virtual Machines** category instead of **Compute**.
-
- ![New Function App](images/12065-NewFunctionApp.png)
-
-1. Complete the properties as follows then click the "**Create**" button:
-
- - App name - ***<name>functions***
- - Subscription - **Chose the same subscription used for the previous resources**
- - Resource group - Choose "**Use existing**" and select the ***<name>group*** resource group created previously
- - Hosting Plan - Choose "**App Service Plan**"
- - App Service Plan - Select the ***<name>plan*** plan we created previously.
- - Storage Account - Select "**Create New**" and name it ***<name>storage***
- - Pin to dashboard - **Checked**
-
- ![New Function App](images/12070-NewFunctionApp.png)
-
-1. When the new Function App is deployed, and its blade open's in the portal, click the "**+New Function**" button, and select "**EventHubTrigger - C#**"
-
- ![Event Hub Trigger C# Function](images/12080-NewCSharpEventHubTriggerFunction.png)
-
-1. **SCROLL DOWN** to continue configuring the new function.
- - Name - **TempAlert**
- - Event Hub Name - ***<name>alerts***
- - Event Hub connection - Click the "**new**" link
-
- ![New Function Properties](images/12090-NewFunctionProperties.png)
-
-1. On the "**Service Bus connection**" blade, click "**Add a connection string**" the configure the properties on the "**Add Service Bus connection**" blade as follows and click "**OK**":
-
- - Connection name - ***<name>ns***
- - Connection string - Copy the value from your Event Hub for the "**Root Manage Shared Access Key SAS Policy Primary Connection String:**" from the "**[myresources.txt](./myresources.txt)**" (The connection string should start with "**`Endpoint=sb://`**" and contain "**`SharedAccessKeyName=RootManageSharedAccessKey`**") file and paste it in here. This connection string gives your Azure Function app permissions to connect to your Service Bus Namespace, and Event Hub with all the permissions it needs.
-
- ![Event Hub Connection String](images/12100-EventHubConnection.png)
-
-1. Finally, click the "**Create**" button to create the function:
-
- ![Create the Function](images/12100-CreateTheFunction.png)
-
-1. Once the "**TempAlert**" function is created, click on the "**Integrate**" link to configure its triggers, inputs and outputs. Our function comes pre-provisioned with an "**Azure Event Hub Trigger**" that will invoke this function whenever a message is available on the ***<name>alerts*** event hub. When the function is invoked the Event Hub message that caused the function to be triggered will be passed in as the **myEventHubMessage** parameter. The functions code can then inspect that parameter's value and act on it.
-
- > **Note**: There isn't a **SAVE** button or anything. These changes are being written into the functions configuration files as you enter them.
-
- - Event parameter name: **`myEventHubMessage`**
- - Event Hub consumer group: **$Default**
- - Event Hub name: ***`alerts`***
- - Event Hub connection: ***`ns`***
-
- ![Function Integration](images/12110-FunctionIntegration.png)
-
-1. Switch back to the "**Develop**" page for the function. The code for our C# function is stored in the "**run.csx**"" file. The default code simply logs the contents of the "myEventHubMessage" parameter value to the console.
-
- ![Default Code](images/12120-DefaultCode.png)
-
-1. We are going to replace the default code with something that provides a little more functionality. Back in Visual Studio Code, open the "**[HOLs\FunctionApp\TempAlert\run.csx](.//FunctionApp/TempAlert/run.csx)**" file. If you receive a prompt from Visual Studio Code to fix unresolved dependencies, click "**Close**". Once the file is open though, copy the entire contents of "**run.csx**" to the clipboard:
-
- ![Copy Run.csx](images/12126-CopyRunCsxToClipboard.png)
-
-1. Back in the portal, replace the entire contents of the run.csx file with the code you just copied, then click the "**Save**" button to save the changes:
-
- ![Paste Code into run.csx in the Portal](images/12128-PasteCodeInRunCsx.png)
-
-1. The code we pasted into "**run.csx**" depends on some libraries (like `Microsoft.Azure.Devices` and `Newtonsoft.Json` as well as others. To make sure the the libraries are installed on the server, we need to specify them in a "**project.json**" file. To add a "**project.json**" file to our our function, click the "**View Files**"" button, then click the "**+ Add**" button, and name the new file "**project.json**" (all lower case):
-
- ![Add project.json](images/12130-AddProjectJson.png)
-
-1. Back in Visual Studio Code, copy the contents of the "**[HOLs\FunctionApp\TempAlert\project.json](./FunctionApp/TempAlert/project.json)**" file to the clipboard:
-
- ![Copy project.json contents](images/12132-CopyProjectJson.png)
-
-1. And back in the portal, replace the contents of the new "**project.json**" file you just created with the contents you copied from Visual Studio Code:
-
- ![Paste project.json contents](images/12134-PasteProjectJson.png)
-
-1. Next, click on each file ("**function.json**","**project.json**", and "**run.csx**") to review its contents.
-
- ![Review function.json](images/12150-ReviewFunctionJson.png)
-
- ![Review project.json](images/12160-ReviewProjectJson.png)
-
- ![Review run.csx](images/12170-RevewRunCsx.png)
-
-1. In the "**run.csx**" file, locate the line of code that reads (should be at line 31 or so):
-
- ```c#
- static string connectionString = "";
- ```
- and replace `` with the "**IoT Hub "service" SAS Policy Primary Connection String**" value from the "**[myresources.txt](./myresources.txt)**" file
- For example:
-
- ```c#
- static string connectionString = "HostName=mic16iot.azure-devices.net;SharedAccessKeyName=service;SharedAccessKey=wQF6dryjMwQ1mMEwDFfcgkSaSscFthHVVJeIfq6iVWQ=";
- ```
-
- Then click the "**Save**" button to save the changes.
-
- ![Paste 'service' policy connection string](images/12160-PasteServiceConnectionString.png)
-
-1. In the "**Logs**" section, verify that the function compiled successfully.
-
- ![Successful Compilation](images/12180-CompilationSucceeded.png)
-
-1. At this point, your function should be working properly. For it to kick off, we need:
-
- - Our device to publish a message with a temperature sensor value that is higher than the threshold value in our ***<name>job*** Stream Analytics Job query (**40** by default)
- - That will cause an event to be sent to the ***<name>alerts*** event hub
- - That will trigger our function, and allow the function to send a cloud-to-device message back to the Intel NUC via the Azure IoT Hub.
-
-1. To try it out:
-
- > **Note**: If the sensor is already publishing values that exceeds the threshold value in the ***<name>job*** Stream Analytics Job Query (**40** by default), you should begin receive alerts immediately. This can become irritating if the threshold is set to low. If desired, you can go back to your ***<name>job*** Stream Analytics Job Query definition and increase the threshold value. If you do need to do that, you will need to first Stop the Stream Analytics Query, then modify the query, then Start the Stream Analytics Query.
-
- - Warm your hands up by rubbing them vigoursly together, then pinch the Temperature sensor on both sides with your warm fingers and try to get the NUC to generate a reading of the threshold value (again, **40** by default)
-
- ![Pinch Sensor](images/12190-PinchSensorWithWarmFingers.png)
-
- - Watch the "**Logs**" section of the Azure function. You should see the incoming event hub message get logged as well as details of the message being sent to the device:
-
- ![Function Log](images/12200-FunctionLoggingMessage.png)
-
- - And on the LCD panel, you should see the "**Temp Alert**" message displayed along with a brief flash of red:
-
- ![Temp Alert on LCD](images/12210-TempAlertOnLcd.png)
-
- - And lastly, if you have the Buzzer plugged in, you should hear it buzz for one second each time an alert comes in!
-
-1. That's it. Your function works. However by default the app can be shutdown if it isn't being regularly accessed. To keep it active:
-
- - Click the "**Function app settings**" link, then pick **"Go to App Service Settings**"
-
- ![Function App Settings](images/12230-FunctionAppSettings.png)
-
- - Click "**Application settings**" then turn the "**Always On**" setting to "**On**" and click the "**Save**" button along the top.
-
- ![Always On](images/12240-AlwaysOn.png)
-___
-
-
-TIME PERMITTING - Display Temperature Data with Power BI Embedded
----
-
-***AS OF FEBRUARY 2017, THERE ARE SOME ISSUES WITH THE POWERBI-CLI THAT MAY PREVENT YOU FROM SUCCESSFULLY COMPLETING THIS SECTION. FEEL FREE TO PROCEED, BUT BE AWARE YOU MAY ENCOUNTER ISSUES. AS THE ISSUES WITH POWERBI-CLI ARE RESOLVED, THIS SECTION WILL BE UPDATED TO REFLECT THOSE CHANGES.***
-
-In this task, we'll walk through publishing a pre-created Power BI report into a Power BI collection and Workspace in your Azure Subscription. You can learn more about Power BI Embedded here: azure.microsoft.com/en-us/services/power-bi-embedded/
-
-1. You **DO NOT NEED** to edit the report provided, however, if you would like to see how it was authored, and your are on a Windows system, you can download "**Power BI Desktop**" from powerbi.microsoft.com/en-us/desktop/. Once you have downloaded it, you can open the "**HOLs\PowerBI\TemperatureChart.pbix**" file to view how it was designed. **REGARDLESS, DO NOT MAKE CHANGES TO THE REPORT AT THIS TIME!**
-
- ![Report in Power BI Desktop](images/13010-TemperatureChartReportInDesktop.png)
-
-1. To publish the report to Power BI Embedded, we need to first create a "**Power BI Embedded Workspace Collection**". Open the **Azure Portal** (https://portal.azure.com) and close any blades left open from previous steps. Then click "**+ New**" | "**Intelligence + analytics**" | "**Power BI Embedded**"
-
- ![New Power BI Embedded Workspace Collection](images/13020-NewPowerBiEmbeddedCollection.png)
-
-1. Complete the properties for the new collection as follows, then click "**Create**" to create it:
-
- - Workspace Collection Name - ***<name>collection***
- - Subscription - **Chose the same subscription used for the previous resources**
- - Resource group - Choose "**Use existing**" and select the ***<name>group*** resource group created previously
- - Location - **Use the same location as the previous resources**
- - Pricing - Leave as "**Standard**"
- - Pin to dashboard - **Checked**
-
- ![Collection Properties](images/13030-PowerBiCollectionProperties.png)
-
-1. Once the new Power BI Embedded Workspace Collection is created, click the "**Access keys**" button along the right of its blade:
-
- ![Access Keys](images/13040-CollectionAccessKeys.png)
-
-1. Then click the icon to the right of the "**KEY 1**" value to copy it to the clipboard:
-
- ![Key 1](images/13050-AccessKey1.png)
-
-1. And document your collection name and key in the "**[myresources.txt](./myresources.txt)**" file.
-
- ![Document Collection](images/13060-DocumentCollectionNameAndKey.png)
-
-1. A "**Workspace Collection**" is just what it sounds like, it is a collection of one or more "**Workspace**" instances. To upload a report, it must go into a Workspace, but at the time of this writing you can't create new Workspaces in the Azure portal. The rest of our interaction with the Power BI Embedded service will be via the "**powerbi-cli** npm package (link). Open a command prompt or terminal window and issue the following npm command to install the "**powerbi-cli**" package globally:
-
- > **Note**: Some users on non-Windows OSs are having issues with the powerbi-cli at least as recently as v1.0.6. If you are having issues using the powerbi-cli commands you may want to try it from a Windows machine if you have access to one. If you are at an event, find a fellow participant with Windows that will let you run the powerbi-cli commands from their computer. You can create your own folder on their machine store store the powerbi config created for your collection, and they can easily delete that folder when you are done.
-
- ```text
- npm install -g powerbi-cli
- ```
-
-1. Make sure the powerbi-cli version is at LEAST v1.0.8 or later. If you installed the powerbi-cli previously, the version might be lower. Go ahead and run the `npm install -g powerbi-cli` command from above to ensure you are running the latest version:
-
- ```bash
- powerbi --version
- ```
-
- Example output:
-
- ```bash
- 1.0.8
- ```
-
-
-1. Once it is installed, in the command prompt or terminal window, change into the "**HOLs\PowerBI**" folder, and run the following command and use the collection name and key you just pasted into the "**[myresources.txt](./myresources.txt)**" file to tell the powerbi how to connect to our workspace collection:
-
- > **Note**: The `powerbi config` command creates a `.powerbirc` file in the directory where the command was executed. It contains sensitive connection information about how to connect to to your Power BI Embedded Workspace Collection so be careful who you expose that file to.
-
- > **Note**: If you receive an error similar to `powerbi-config(1) does not exist, try --help` you may want to refer to Microsoft/PowerBI-Cli#5 (comment) for a possible workaround. Or, as an alternative you can simply supply the key, collection name, and workspace id for each powerbi command you execute. You do not NEED to do the powerbi config commands, it just helps make future statements easier.
-
- ```text
- powerbi config -c -k ""
- ```
- For example:
-
- ```text
- powerbi config -c mic16collection -k "BoeKHkxkB/JuHsXTRsgUSegrvNnMC97YgycKJYXKDY7q9v5nbSxpoJkfvMmvMr68CrAi1iQVv0KgCpjlVtLIxw=="
- ```
-
- Which returns this output as shown below. The values shown here are stored in the ".powerbirc" file in the current directory:
-
- ```text
- [ powerbi ] collection: mic16collection
- [ powerbi ] accessKey: BoeKHkxkB/JuHsXTRsgUSegrvNnMC97YgycKJYXKDY7q9v5nbSxpoJkfvMmvMr68CrAi1iQVv0KgCpjlVtLIxw==
- ```
-
-1. Now, we can create a new "**Workspace**". Workspaces are the containers that we upload reports into. Use the following command:
-
- ```text
- powerbi create-workspace
- ```
- Which returns output similar to the following, showing the Id of the workspace that was created:
-
- > **Note**: When a PowerBI Embedded Collection has just been created, you may get intermittent errors when attempting to connect to it. If you get an error, first verify that you are using the proper values as arguments, and then continue to repeat the statement until it succeeds.
-
- ```text
- [ powerbi ] Workspace created: 9c3b7e34-4a86-4c9b-9534-f9f3953e7f92
- ```
-
- Then save the new Workspace Id to the config so you don't have to enter it each time:
-
- ```text
- powerbi config -w
- ```
-
- For example:
-
- ```text
- powerbi config -w 9c3b7e34-4a86-4c9b-9534-f9f3953e7f92
- ```
-
- Which returns:
-
- ```text
- [ powerbi ] collection: mic16collection
- [ powerbi ] accessKey: BoeKHkxkB/JuHsXTRsgUSegrvNnMC97YgycKJYXKDY7q9v5nbSxpoJkfvMmvMr68CrAi1iQVv0KgCpjlVtLIxw==
- [ powerbi ] workspace: 9c3b7e34-4a86-4c9b-9534-f9f3953e7f92
- ```
-
- Finally, copy the new Workspace ID returned from the statement above and past it into the "**[myresources.txt](./myresources.txt)**" file.
-
- ![Workspace ID Documented](images/13070-WorkspaceIdDocumented.png)
-
-1. Now we can upload our report (the TemperatureChart.pbix file) into our new workspace:
-
- > **Note**: These commands assume you are in the "**HOLs\PowerBI**" folder.
-
- ```text
- powerbi import -n -f
- ```
-
- For example:
-
- ```text
- powerbi import -n "TemperatureChart" -f "TemperatureChart.pbix"
- ```
-
- Which returns something like:
-
- ```text
- [ powerbi ] Importing TemperatureChart.pbix to workspace: 9c3b7e34-4a86-4c9b-9534-f9f3953e7f92
- [ powerbi ] File uploaded successfully
- [ powerbi ] Import ID: b3cd9de9-11e5-473c-9b55-0e569c89a756
- [ powerbi ] Checking import state: Publishing
- [ powerbi ] Checking import state: Succeeded
- [ powerbi ] Import succeeded
- ```
-
-1. Next, we'll retrieve the unique IDs for the report, and the dataset in it:
-
- ```text
- powerbi get-reports
- ```
-
- Returns:
-
- ```text
- [ powerbi ] =========================================
- [ powerbi ] Gettings reports for Collection: 9c3b7e34-4a86-4c9b-9534-f9f3953e7f92
- [ powerbi ] =========================================
- [ powerbi ] ID: 9cc7d690-2d22-4d8c-be13-66b8d9349167 | Name: TemperatureChart
- ```
-
- And
-
- ```text
- powerbi get-datasets
- ```
-
- Returns
-
- ```text
- =========================================
- Gettings datasets for Collection: 9c3b7e34-4a86-4c9b-9534-f9f3953e7f92
- =========================================
- ID: ed212c12-0335-414d-b0f1-d4e1be1268da | Name: TemperatureChart
- ```
-
-1. Copy the report and Data set IDs returned from the last two statements and past them into the "**[myresources.txt](./myresources.txt)**" file.
-
- ![Document Report and Dataset IDs](images/13073-DocumentReportAndDataset.png)
-
-1. The last step on the report side is to update the connection information for the Dataset in the report to point to our Azure SQL Database, on our Azure SQL Server with our login credentials.
-
- We need to create a connection string in the right format. Here is the template for the connection string:
-
- ```text
- "data source=sql.database.windows.net;initial catalog=db;persist security info=True;encrypt=True;trustservercertificate=False"
- ```
-
- Replace the ***<name>sql*** and ***<name>db*** values above with your own. For example:
-
- ```text
- "data source=mic16sql.database.windows.net;initial catalog=mic16db;persist security info=True;encrypt=True;trustservercertificate=False"
- ```
-1. Copy the connection string and paste it into the "**[myresources.txt](./myresources.txt)**" file:
-
- ![Document Connection String](images/13075-DocumentConnectionString.png)
-
-1. Next, use the values for our Dataset ID, SQL Login Name and Password, and the Connection String from above to complete the following statement:
-
- ```text
- powerbi update-connection --dataset "" --username --password "" --connectionString ""
- ```
-
- For example:
-
- ```text
- powerbi update-connection --dataset "ed212c12-0335-414d-b0f1-d4e1be1268da" --username sqladmin --password "P@ssw0rd" --connectionString "data source=mic16sql.database.windows.net;initial catalog=mic16db;persist security info=True;encrypt=True;trustservercertificate=False"
- ```
- Which returns something similar to:
-
- > **Note**: Sometimes this fails. If you get an error, double check your parameters, **but even if they are correct, simply running the command a second or third time may work**.
-
- ```text
- [ powerbi ] Found dataset!
- [ powerbi ] Id: ed212c12-0335-414d-b0f1-d4e1be1268da
- [ powerbi ] Name: TemperatureChart
- [ powerbi ] Updating connection string...
- [ powerbi ] Getting gateway datasources...
- [ powerbi ] Connection string successfully updated
- [ powerbi ] Dataset: ed212c12-0335-414d-b0f1-d4e1be1268da
- [ powerbi ] ConnectionString: data source=mic16sql.database.windows.net;initial catalog=mic16db;persist security info=T
- rue;encrypt=True;trustservercertificate=False
- [ powerbi ] Found 1 Datasources for Dataset ed212c12-0335-414d-b0f1-d4e1be1268da
- [ powerbi ] --------------------------------------------------------------------
- [ powerbi ] Datesource ID: dbb612f8-06c8-481c-984d-3b3e28391cf3
- [ powerbi ] Gateway ID: 8b37fcc6-be5a-47e3-a48d-9d9390b29338
- [ powerbi ] Credential Type: undefined
- [ powerbi ] Datasource Type: Sql
- [ powerbi ] Updating datasource credentials...
- [ powerbi ] Successfully updated datasource credentials!
- [ powerbi ] Datasource ID: dbb612f8-06c8-481c-984d-3b3e28391cf3
- [ powerbi ] Gateway ID: 8b37fcc6-be5a-47e3-a48d-9d9390b29338
- ```
-
-1. Ok, the last step is to actually embed the report into our web app. Most of the code has already been written for us, we just need to make a few quick changes. To get started, open the "**HOLs\WebApp"** folder directly in ***A SEPARATE INSTANCE*** of "**Visual Studio Code**" just as you did when you were working with the web app previously. Use the values you've saved in the "**[myresources.txt](./myresources.txt)**" file to complete the "powerbi*" config settings in the config.json file, and **Save** your changes:
-
- ![Power BI Config Settings](images/13080-PowerBIConfigValues.png)
-
- For Example:
-
- ![Power BI Config Completed](images/13085-PowerBIConfigCompleted.png)
-
-1. Next, open the "**public/index.html**" file and locate the code as shown below. The div that will contain our embedded report has been commented out, and a placeholder `` is being displayed instead. We need to switch that around so the `` is commented out, and the `` is availabe.
-
- ```html
-
-
- ```
- and switch them around so the `` tag is commented out, and the `` tag isn't
-
- ```html
-
-
- ```
-
- ![Commented Out Image](images/13095-CommentedOutImg.png)
-
-1. Next, near the bottom of the "**public\index.html**" file locate the following code:
-
- ```javascript
- //Uncomment the following line of code to embed the Power BI report.
- //$scope.embedReport();
- ```
-
- And uncomment the `$scope.embedReport();` statement:
-
- ```javascript
- //Uncomment the following line of code to embed the Power BI report.
- $scope.embedReport();
- ```
-
- ![Uncomment embedReport() call](images/13110-EmbedReportCallUncommented.png)
-
- This will cause some code to run when the page is loaded to embed the report into the `` container we uncommented above. **Save** your changes.
-
-1. The following information is just FYI, you don't need to do anything with this code:
-
- The embedReport() function we are calling above calls into the backend node.js application hosted in server.js to retrieve a valid "**embed token**" for the report.
-
- ```javascript
- $scope.embedReport = function () {
- //Get just the very latest measurements from the node.js server back end
- $http.get('/api/powerbiembedconfig').success(function(config) {
-
- if(config) {
- var powerbiReport = angular.element( document.querySelector( '#powerbiReport' ) )[0];
- powerbi.embed(powerbiReport,config);
- }
- });
- }
- ```
-
- The `/api/powerbiembedconfig` route on the backend server in server.js uses the **powerbi-api** node.js library to create a "**JSON Web Token**" or "**JWT**" token that the embedded request uses to authenticate with the Power BI Embedded service. The "**JWT**" token is signed by your Workspace Collection's Access Key which is known by the backend server, but not the front end web application:
-
- ```javascript
- app.get('/api/powerbiembedconfig',
- function(req,res){
- //FYI, http://calebb.net and http://jwt.io have token decoders you can use to inspect the generated token.
-
- // Set the expiration to 24 hours from now:
- var username = null; //Not creating a user specific token
- var roles = null; //Not creating a role specific token
- var expiration = new Date();
- expiration.setHours(expiration.getHours() + 24);
-
- // Get the other parameters from the variables we initialized
- // previously with values from the config.json file.
- // Then generate a valid Power BI Report Embed token with the values.
- var token = powerbi.PowerBIToken.createReportEmbedToken(
- powerbiCollectionName,
- powerbiWorkspaceId,
- powerbiReportId,
- username,
- roles,
- expiration);
- // And sign it with the provided Power Bi Access key
- // Again, this value comes from the config.json file
- var jwt = token.generate(powerbiAccessKey);
-
- // Create the required embed configuration for the
- // web client front end to use
- var embedConfig = {
- type: 'report',
- accessToken: jwt,
- id: powerbiReportId,
- embedUrl: 'https://embedded.powerbi.com/appTokenReportEmbed'
- };
-
- // And pass that config back to the user as the response.
- res.json(embedConfig);
- }
- );
- ```
-
-1. Regardless, we should be done. Let's commit our changes into the git repo, and sync with the Azure Web App repo in Azure..
-
- In Visual Studio Code, click the "**git**" icon on the left, add a comment to the commit message box at the top, and click the "**checkmark**" icon to commit your changes:
-
- ![Commit Changes](images/13120-CommitChanges.png)
-
-1. Then still on the "**git**" panel, click the "**...**" ellipsis button at the top, and select "**Sync**" to push our changes up to the Azure Web Apps.
-
- ![Sync Changes](images/13130-SyncWithAzure.png)
-
-1. Then, in the **Azure Portal** (https://portal.azure.com), on the "**Deployment options**" for your ***<name>web*** Web App, verify that the deployment succeeds:
-
- ![Verify Deployment](images/13140-VerifyTheDeploymentSucceeded.png)
-
-1. Then in your browser open the web site in azure (`***http://web.azurewebsites.net`***) and check out the new report!
-
- ![Embedded Report Visible](images/13150-EmbeddedReportVisibleInAzure.png)
-
-1. The page is set to refresh automatically every 30 seconds. You should see that the report updates the data it displays as well!
-
___
diff --git a/README.md b/README.md
index b025369..294ab00 100644
--- a/README.md
+++ b/README.md
@@ -5,8 +5,9 @@ Welcome to the Microsoft / Intel IoT Camp!
In this camp you'll get an opportunity to work with the latest commercial grade IoT hardware from Intel and a wide array of "Internet of Things" related services in Microsoft Azure.
-Check out the [slides](Slides)
+# Hands on Labs Document
+Get started with the [Hands on Labs](HOLs)
-or
+# Download slides
+Check out the [slides](Slides)
-Get started with the [Hands on Labs](HOLs)
\ No newline at end of file
diff --git a/Slides/MicrosoftAzureIoTOverview.pptx b/Slides/1. Microsoft Azure IoT Workshop.pptx
similarity index 95%
rename from Slides/MicrosoftAzureIoTOverview.pptx
rename to Slides/1. Microsoft Azure IoT Workshop.pptx
index 3e863f4..5cf13a6 100644
Binary files a/Slides/MicrosoftAzureIoTOverview.pptx and b/Slides/1. Microsoft Azure IoT Workshop.pptx differ
diff --git a/Slides/2. Walk In Deck.pptx b/Slides/2. Walk In Deck.pptx
new file mode 100644
index 0000000..1d7b656
Binary files /dev/null and b/Slides/2. Walk In Deck.pptx differ
diff --git a/Slides/3. Microsoft Azure IoT Reference Architecture.pptx b/Slides/3. Microsoft Azure IoT Reference Architecture.pptx
new file mode 100644
index 0000000..282641a
Binary files /dev/null and b/Slides/3. Microsoft Azure IoT Reference Architecture.pptx differ
diff --git a/Slides/Microsoft Intel IoT Camp Gateway Overview.pdf b/Slides/Microsoft Intel IoT Camp Gateway Overview.pdf
deleted file mode 100644
index 664eaa1..0000000
Binary files a/Slides/Microsoft Intel IoT Camp Gateway Overview.pdf and /dev/null differ
diff --git a/Slides/Microsoft Intel IoT Camp Train the Trainer.pptx b/Slides/Microsoft Intel IoT Camp Train the Trainer.pptx
deleted file mode 100644
index 6001ff7..0000000
Binary files a/Slides/Microsoft Intel IoT Camp Train the Trainer.pptx and /dev/null differ
diff --git a/Slides/MicrosoftIntelIoTCampLabArchitecture.pptx b/Slides/MicrosoftIntelIoTCampLabArchitecture.pptx
deleted file mode 100644
index d99c158..0000000
Binary files a/Slides/MicrosoftIntelIoTCampLabArchitecture.pptx and /dev/null differ
diff --git a/Slides/Walk In Deck.pptx b/Slides/Walk In Deck.pptx
deleted file mode 100644
index b47be1d..0000000
Binary files a/Slides/Walk In Deck.pptx and /dev/null differ
diff --git a/Slides/readme.md b/Slides/readme.md
index de815c7..e0bda39 100644
--- a/Slides/readme.md
+++ b/Slides/readme.md
@@ -1,6 +1,6 @@
# Get the slides!
+## Presentation slides.
+- Azure IoT Camp
-- Walk In Deck
-- Microsoft Azure IoT Overview
- - Cameron Skinner and Doug Seven presented this deck at BUILD 2016. [Watch Session B815](https://channel9.msdn.com/Events/Build/2016/B815)
-- Intel IoT Gateway Overview
+### IoT reference architecture
+- IoT reference architecture