diff --git a/af-ZA/images/banner.png b/af-ZA/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/af-ZA/images/banner.png differ diff --git a/af-ZA/images/cheese-sandwich-annotated.png b/af-ZA/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/af-ZA/images/cheese-sandwich-annotated.png differ diff --git a/af-ZA/images/cheese-sandwich.png b/af-ZA/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/af-ZA/images/cheese-sandwich.png differ diff --git a/af-ZA/images/click-flag-annotated.png b/af-ZA/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/af-ZA/images/click-flag-annotated.png differ diff --git a/af-ZA/images/click-flag.png b/af-ZA/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/af-ZA/images/click-flag.png differ diff --git a/af-ZA/images/code-new-blocks.png b/af-ZA/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/af-ZA/images/code-new-blocks.png differ diff --git a/af-ZA/images/code-with-confidence.png b/af-ZA/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/af-ZA/images/code-with-confidence.png differ diff --git a/af-ZA/images/create-project-annotated.png b/af-ZA/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/af-ZA/images/create-project-annotated.png differ diff --git a/af-ZA/images/create-project.png b/af-ZA/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/af-ZA/images/create-project.png differ diff --git a/af-ZA/images/empty-buckets.png b/af-ZA/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/af-ZA/images/empty-buckets.png differ diff --git a/af-ZA/images/full-buckets.png b/af-ZA/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/af-ZA/images/full-buckets.png differ diff --git a/af-ZA/images/open-scratch-3-annotated.png b/af-ZA/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/af-ZA/images/open-scratch-3-annotated.png differ diff --git a/af-ZA/images/open-scratch-3.png b/af-ZA/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/af-ZA/images/open-scratch-3.png differ diff --git a/af-ZA/images/project-make-annotated.png b/af-ZA/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/af-ZA/images/project-make-annotated.png differ diff --git a/af-ZA/images/project-make.png b/af-ZA/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/af-ZA/images/project-make.png differ diff --git a/af-ZA/images/project-templates-annotated.png b/af-ZA/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/af-ZA/images/project-templates-annotated.png differ diff --git a/af-ZA/images/project-templates.png b/af-ZA/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and b/af-ZA/images/project-templates.png differ diff --git a/af-ZA/images/project-train-annotated.png b/af-ZA/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary 
files /dev/null and b/af-ZA/images/project-train-annotated.png differ diff --git a/af-ZA/images/projects-list-annotated.png b/af-ZA/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/af-ZA/images/projects-list-annotated.png differ diff --git a/af-ZA/images/projects-list.png b/af-ZA/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/af-ZA/images/projects-list.png differ diff --git a/af-ZA/images/scratch-template-annotated.png b/af-ZA/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/af-ZA/images/scratch-template-annotated.png differ diff --git a/af-ZA/images/scratch-template.png b/af-ZA/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/af-ZA/images/scratch-template.png differ diff --git a/af-ZA/images/smart-classroom.gif b/af-ZA/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/af-ZA/images/smart-classroom.gif differ diff --git a/af-ZA/images/test-new-model-annotated.png b/af-ZA/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/af-ZA/images/test-new-model-annotated.png differ diff --git a/af-ZA/images/test-with-new-blocks-annotated.png b/af-ZA/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/af-ZA/images/test-with-new-blocks-annotated.png differ diff --git a/af-ZA/images/test-with-new-blocks.png b/af-ZA/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/af-ZA/images/test-with-new-blocks.png differ diff --git a/af-ZA/images/train-new-model-annotated.png b/af-ZA/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/af-ZA/images/train-new-model-annotated.png differ diff --git a/af-ZA/images/train-new-model.png b/af-ZA/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/af-ZA/images/train-new-model.png differ diff --git a/af-ZA/images/what-you-will-make.png b/af-ZA/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/af-ZA/images/what-you-will-make.png differ diff --git a/af-ZA/meta.yml b/af-ZA/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/af-ZA/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? diff --git a/af-ZA/step_1.md b/af-ZA/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/af-ZA/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. 
You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/af-ZA/step_2.md b/af-ZA/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/af-ZA/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. + +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/af-ZA/step_3.md b/af-ZA/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/af-ZA/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. 
+ +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/af-ZA/step_4.md b/af-ZA/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/af-ZA/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. + +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. 
++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/af-ZA/step_5.md b/af-ZA/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/af-ZA/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. \ No newline at end of file diff --git a/af-ZA/step_6.md b/af-ZA/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/af-ZA/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. 
+ +![Annotation pointing at the Open in Scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your smart virtual classroom in Scratch uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/af-ZA/step_7.md b/af-ZA/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/af-ZA/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. + ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command.
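+If you find the screenshot earlier in this task hard to read, the sketch below shows roughly how the confidence check can fit around the new blocks. It is only a rough sketch, not the exact code from the image: it assumes your project also adds a `recognise text … (confidence)` reporter next to the `recognise text … (label)` block, it uses the 70% threshold and the “Sorry I’m not sure what you mean” message that the challenge step talks about, and it uses label names like `fan_on` that match the buckets shown in the Training tool. Check the blocks and label names in your own project and change the sketch to match what you see.
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (confidence)) > [70]> then // only act when the model is fairly sure
+if <(recognise text (answer) (label)) = [fan_on]> then
+broadcast (turn-fan-on v)
+end
+if <(recognise text (answer) (label)) = [fan_off]> then
+broadcast (turn-fan-off v)
+end
+if <(recognise text (answer) (label)) = [lamp_on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [lamp_off]> then
+broadcast (turn-lamp-off v)
+end
+else
+say [Sorry I’m not sure what you mean] for (2) seconds // low confidence, so admit that you are not sure
+end
+end
+```
+
+The outer `if` is the important new part: the broadcasts only happen when the confidence score is above the threshold, and the `else` branch is what lets the assistant tell you when it is not sure what you mean.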
\--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/af-ZA/step_8.md b/af-ZA/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/af-ZA/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the right confidence threshold for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that interests you and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/af-ZA/step_9.md b/af-ZA/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/af-ZA/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other Scratch machine learning projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/ar-SA/images/banner.png b/ar-SA/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/ar-SA/images/banner.png differ diff --git a/ar-SA/images/cheese-sandwich-annotated.png b/ar-SA/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/ar-SA/images/cheese-sandwich-annotated.png differ diff --git a/ar-SA/images/cheese-sandwich.png b/ar-SA/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/ar-SA/images/cheese-sandwich.png differ diff --git a/ar-SA/images/click-flag-annotated.png b/ar-SA/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/ar-SA/images/click-flag-annotated.png differ diff --git a/ar-SA/images/click-flag.png b/ar-SA/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/ar-SA/images/click-flag.png differ diff --git a/ar-SA/images/code-new-blocks.png b/ar-SA/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/ar-SA/images/code-new-blocks.png differ diff --git a/ar-SA/images/code-with-confidence.png b/ar-SA/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/ar-SA/images/code-with-confidence.png differ diff --git a/ar-SA/images/create-project-annotated.png b/ar-SA/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/ar-SA/images/create-project-annotated.png differ diff --git a/ar-SA/images/create-project.png b/ar-SA/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/ar-SA/images/create-project.png differ diff --git a/ar-SA/images/empty-buckets.png b/ar-SA/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/ar-SA/images/empty-buckets.png differ diff --git a/ar-SA/images/full-buckets.png b/ar-SA/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/ar-SA/images/full-buckets.png differ diff --git a/ar-SA/images/open-scratch-3-annotated.png b/ar-SA/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/ar-SA/images/open-scratch-3-annotated.png differ diff --git a/ar-SA/images/open-scratch-3.png b/ar-SA/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/ar-SA/images/open-scratch-3.png differ diff --git a/ar-SA/images/project-make-annotated.png b/ar-SA/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/ar-SA/images/project-make-annotated.png differ diff --git a/ar-SA/images/project-make.png b/ar-SA/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/ar-SA/images/project-make.png differ diff --git a/ar-SA/images/project-templates-annotated.png b/ar-SA/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/ar-SA/images/project-templates-annotated.png differ diff --git a/ar-SA/images/project-templates.png b/ar-SA/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/ar-SA/images/project-templates.png differ diff --git a/ar-SA/images/project-train-annotated.png b/ar-SA/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/ar-SA/images/project-train-annotated.png differ diff --git a/ar-SA/images/projects-list-annotated.png b/ar-SA/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/ar-SA/images/projects-list-annotated.png differ diff --git a/ar-SA/images/projects-list.png b/ar-SA/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/ar-SA/images/projects-list.png differ diff --git a/ar-SA/images/scratch-template-annotated.png b/ar-SA/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/ar-SA/images/scratch-template-annotated.png differ diff --git a/ar-SA/images/scratch-template.png b/ar-SA/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/ar-SA/images/scratch-template.png differ diff --git a/ar-SA/images/smart-classroom.gif b/ar-SA/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/ar-SA/images/smart-classroom.gif differ diff --git a/ar-SA/images/test-new-model-annotated.png b/ar-SA/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/ar-SA/images/test-new-model-annotated.png differ diff --git a/ar-SA/images/test-with-new-blocks-annotated.png b/ar-SA/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/ar-SA/images/test-with-new-blocks-annotated.png differ diff --git a/ar-SA/images/test-with-new-blocks.png b/ar-SA/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/ar-SA/images/test-with-new-blocks.png differ diff --git a/ar-SA/images/train-new-model-annotated.png b/ar-SA/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/ar-SA/images/train-new-model-annotated.png differ diff --git a/ar-SA/images/train-new-model.png b/ar-SA/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/ar-SA/images/train-new-model.png differ diff --git a/ar-SA/images/what-you-will-make.png b/ar-SA/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/ar-SA/images/what-you-will-make.png differ diff --git a/ar-SA/meta.yml b/ar-SA/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/ar-SA/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/ar-SA/step_1.md b/ar-SA/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/ar-SA/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/ar-SA/step_2.md b/ar-SA/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/ar-SA/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/ar-SA/step_3.md b/ar-SA/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/ar-SA/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/ar-SA/step_4.md b/ar-SA/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/ar-SA/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/ar-SA/step_5.md b/ar-SA/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/ar-SA/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
\ No newline at end of file diff --git a/ar-SA/step_6.md b/ar-SA/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/ar-SA/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![Annotation pointing at the Open in Scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your smart virtual classroom in Scratch uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/ar-SA/step_7.md b/ar-SA/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/ar-SA/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch.
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/ar-SA/step_8.md b/ar-SA/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/ar-SA/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the right confidence threshold for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that interests you and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/ar-SA/step_9.md b/ar-SA/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/ar-SA/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other Scratch machine learning projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/ca-ES/images/banner.png b/ca-ES/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/ca-ES/images/banner.png differ diff --git a/ca-ES/images/cheese-sandwich-annotated.png b/ca-ES/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/ca-ES/images/cheese-sandwich-annotated.png differ diff --git a/ca-ES/images/cheese-sandwich.png b/ca-ES/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/ca-ES/images/cheese-sandwich.png differ diff --git a/ca-ES/images/click-flag-annotated.png b/ca-ES/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/ca-ES/images/click-flag-annotated.png differ diff --git a/ca-ES/images/click-flag.png b/ca-ES/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/ca-ES/images/click-flag.png differ diff --git a/ca-ES/images/code-new-blocks.png b/ca-ES/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/ca-ES/images/code-new-blocks.png differ diff --git a/ca-ES/images/code-with-confidence.png b/ca-ES/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/ca-ES/images/code-with-confidence.png differ diff --git a/ca-ES/images/create-project-annotated.png b/ca-ES/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/ca-ES/images/create-project-annotated.png differ diff --git a/ca-ES/images/create-project.png b/ca-ES/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/ca-ES/images/create-project.png differ diff --git a/ca-ES/images/empty-buckets.png b/ca-ES/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/ca-ES/images/empty-buckets.png differ diff --git a/ca-ES/images/full-buckets.png b/ca-ES/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/ca-ES/images/full-buckets.png differ diff --git a/ca-ES/images/open-scratch-3-annotated.png b/ca-ES/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/ca-ES/images/open-scratch-3-annotated.png differ diff --git a/ca-ES/images/open-scratch-3.png b/ca-ES/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/ca-ES/images/open-scratch-3.png differ diff --git a/ca-ES/images/project-make-annotated.png b/ca-ES/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/ca-ES/images/project-make-annotated.png differ diff --git a/ca-ES/images/project-make.png b/ca-ES/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/ca-ES/images/project-make.png differ diff --git a/ca-ES/images/project-templates-annotated.png b/ca-ES/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/ca-ES/images/project-templates-annotated.png differ diff --git a/ca-ES/images/project-templates.png b/ca-ES/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/ca-ES/images/project-templates.png differ diff --git a/ca-ES/images/project-train-annotated.png b/ca-ES/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/ca-ES/images/project-train-annotated.png differ diff --git a/ca-ES/images/projects-list-annotated.png b/ca-ES/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/ca-ES/images/projects-list-annotated.png differ diff --git a/ca-ES/images/projects-list.png b/ca-ES/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/ca-ES/images/projects-list.png differ diff --git a/ca-ES/images/scratch-template-annotated.png b/ca-ES/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/ca-ES/images/scratch-template-annotated.png differ diff --git a/ca-ES/images/scratch-template.png b/ca-ES/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/ca-ES/images/scratch-template.png differ diff --git a/ca-ES/images/smart-classroom.gif b/ca-ES/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/ca-ES/images/smart-classroom.gif differ diff --git a/ca-ES/images/test-new-model-annotated.png b/ca-ES/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/ca-ES/images/test-new-model-annotated.png differ diff --git a/ca-ES/images/test-with-new-blocks-annotated.png b/ca-ES/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/ca-ES/images/test-with-new-blocks-annotated.png differ diff --git a/ca-ES/images/test-with-new-blocks.png b/ca-ES/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/ca-ES/images/test-with-new-blocks.png differ diff --git a/ca-ES/images/train-new-model-annotated.png b/ca-ES/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/ca-ES/images/train-new-model-annotated.png differ diff --git a/ca-ES/images/train-new-model.png b/ca-ES/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/ca-ES/images/train-new-model.png differ diff --git a/ca-ES/images/what-you-will-make.png b/ca-ES/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/ca-ES/images/what-you-will-make.png differ diff --git a/ca-ES/meta.yml b/ca-ES/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/ca-ES/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/ca-ES/step_1.md b/ca-ES/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/ca-ES/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/ca-ES/step_2.md b/ca-ES/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/ca-ES/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/ca-ES/step_3.md b/ca-ES/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/ca-ES/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/ca-ES/step_4.md b/ca-ES/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/ca-ES/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/ca-ES/step_5.md b/ca-ES/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/ca-ES/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
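+
+In the next two steps, you will swap the rules you wrote earlier for your trained model, and then make use of the model's confidence score. The finished scripts appear there as screenshots; as a rough text-only sketch (assuming the `recognise text … (label)` block described in the next step, a matching `recognise text … (confidence)` reporter block, the label names shown in the training screenshots, and the 70% threshold that the challenge step later asks you to experiment with), the final script might look something like this:
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (confidence)) > [70]> then
+if <(recognise text (answer) (label)) = [fan_on]> then
+broadcast (turn-fan-on v)
+end
+if <(recognise text (answer) (label)) = [fan_off]> then
+broadcast (turn-fan-off v)
+end
+if <(recognise text (answer) (label)) = [lamp_on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [lamp_off]> then
+broadcast (turn-lamp-off v)
+end
+else
+say [Sorry I’m not sure what you mean]
+end
+end
+```
+
+If the blocks or label names in your own project look different, follow what you see in Scratch and in the Train tool rather than this sketch.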
\ No newline at end of file diff --git a/ca-ES/step_6.md b/ca-ES/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/ca-ES/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![Annotation pointing at the Open in Scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New Scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/ca-ES/step_7.md b/ca-ES/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/ca-ES/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn what confidence scores mean and how to use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch.
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into the Scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands and the higher its confidence scores become. \ No newline at end of file diff --git a/ca-ES/step_8.md b/ca-ES/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/ca-ES/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling it? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the right confidence threshold for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different values until you find one that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants in the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that interests you and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/ca-ES/step_9.md b/ca-ES/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/ca-ES/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other Scratch machine learning projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/cs-CZ/images/banner.png b/cs-CZ/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/cs-CZ/images/banner.png differ diff --git a/cs-CZ/images/cheese-sandwich-annotated.png b/cs-CZ/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/cs-CZ/images/cheese-sandwich-annotated.png differ diff --git a/cs-CZ/images/cheese-sandwich.png b/cs-CZ/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/cs-CZ/images/cheese-sandwich.png differ diff --git a/cs-CZ/images/click-flag-annotated.png b/cs-CZ/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/cs-CZ/images/click-flag-annotated.png differ diff --git a/cs-CZ/images/click-flag.png b/cs-CZ/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/cs-CZ/images/click-flag.png differ diff --git a/cs-CZ/images/code-new-blocks.png b/cs-CZ/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/cs-CZ/images/code-new-blocks.png differ diff --git a/cs-CZ/images/code-with-confidence.png b/cs-CZ/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/cs-CZ/images/code-with-confidence.png differ diff --git a/cs-CZ/images/create-project-annotated.png b/cs-CZ/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/cs-CZ/images/create-project-annotated.png differ diff --git a/cs-CZ/images/create-project.png b/cs-CZ/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/cs-CZ/images/create-project.png differ diff --git a/cs-CZ/images/empty-buckets.png b/cs-CZ/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/cs-CZ/images/empty-buckets.png differ diff --git a/cs-CZ/images/full-buckets.png b/cs-CZ/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/cs-CZ/images/full-buckets.png differ diff --git a/cs-CZ/images/open-scratch-3-annotated.png b/cs-CZ/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/cs-CZ/images/open-scratch-3-annotated.png differ diff --git a/cs-CZ/images/open-scratch-3.png b/cs-CZ/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/cs-CZ/images/open-scratch-3.png differ diff --git a/cs-CZ/images/project-make-annotated.png b/cs-CZ/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/cs-CZ/images/project-make-annotated.png differ diff --git a/cs-CZ/images/project-make.png b/cs-CZ/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/cs-CZ/images/project-make.png differ diff --git a/cs-CZ/images/project-templates-annotated.png b/cs-CZ/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/cs-CZ/images/project-templates-annotated.png differ diff --git a/cs-CZ/images/project-templates.png b/cs-CZ/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/cs-CZ/images/project-templates.png differ diff --git a/cs-CZ/images/project-train-annotated.png b/cs-CZ/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/cs-CZ/images/project-train-annotated.png differ diff --git a/cs-CZ/images/projects-list-annotated.png b/cs-CZ/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/cs-CZ/images/projects-list-annotated.png differ diff --git a/cs-CZ/images/projects-list.png b/cs-CZ/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/cs-CZ/images/projects-list.png differ diff --git a/cs-CZ/images/scratch-template-annotated.png b/cs-CZ/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/cs-CZ/images/scratch-template-annotated.png differ diff --git a/cs-CZ/images/scratch-template.png b/cs-CZ/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/cs-CZ/images/scratch-template.png differ diff --git a/cs-CZ/images/smart-classroom.gif b/cs-CZ/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/cs-CZ/images/smart-classroom.gif differ diff --git a/cs-CZ/images/test-new-model-annotated.png b/cs-CZ/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/cs-CZ/images/test-new-model-annotated.png differ diff --git a/cs-CZ/images/test-with-new-blocks-annotated.png b/cs-CZ/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/cs-CZ/images/test-with-new-blocks-annotated.png differ diff --git a/cs-CZ/images/test-with-new-blocks.png b/cs-CZ/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/cs-CZ/images/test-with-new-blocks.png differ diff --git a/cs-CZ/images/train-new-model-annotated.png b/cs-CZ/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/cs-CZ/images/train-new-model-annotated.png differ diff --git a/cs-CZ/images/train-new-model.png b/cs-CZ/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/cs-CZ/images/train-new-model.png differ diff --git a/cs-CZ/images/what-you-will-make.png b/cs-CZ/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/cs-CZ/images/what-you-will-make.png differ diff --git a/cs-CZ/meta.yml b/cs-CZ/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/cs-CZ/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/cs-CZ/step_1.md b/cs-CZ/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/cs-CZ/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/cs-CZ/step_2.md b/cs-CZ/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/cs-CZ/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/cs-CZ/step_3.md b/cs-CZ/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/cs-CZ/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/cs-CZ/step_4.md b/cs-CZ/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/cs-CZ/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/cs-CZ/step_5.md b/cs-CZ/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/cs-CZ/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
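+
+In the next two steps, you will swap the rules you wrote earlier for your trained model, and then make use of the model's confidence score. The finished scripts appear there as screenshots; as a rough text-only sketch (assuming the `recognise text … (label)` block described in the next step, a matching `recognise text … (confidence)` reporter block, the label names shown in the training screenshots, and the 70% threshold that the challenge step later asks you to experiment with), the final script might look something like this:
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (confidence)) > [70]> then
+if <(recognise text (answer) (label)) = [fan_on]> then
+broadcast (turn-fan-on v)
+end
+if <(recognise text (answer) (label)) = [fan_off]> then
+broadcast (turn-fan-off v)
+end
+if <(recognise text (answer) (label)) = [lamp_on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [lamp_off]> then
+broadcast (turn-lamp-off v)
+end
+else
+say [Sorry I’m not sure what you mean]
+end
+end
+```
+
+If the blocks or label names in your own project look different, follow what you see in Scratch and in the Train tool rather than this sketch.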
\ No newline at end of file diff --git a/cs-CZ/step_6.md b/cs-CZ/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/cs-CZ/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![Annotation pointing at the Open in Scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New Scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/cs-CZ/step_7.md b/cs-CZ/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/cs-CZ/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn what confidence scores mean and how to use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch.
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into the Scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands and the higher its confidence scores become. \ No newline at end of file diff --git a/cs-CZ/step_8.md b/cs-CZ/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/cs-CZ/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling it? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the right confidence threshold for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different values until you find one that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants in the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that interests you and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/cs-CZ/step_9.md b/cs-CZ/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/cs-CZ/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other Scratch machine learning projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/da-DK/images/banner.png b/da-DK/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/da-DK/images/banner.png differ diff --git a/da-DK/images/cheese-sandwich-annotated.png b/da-DK/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/da-DK/images/cheese-sandwich-annotated.png differ diff --git a/da-DK/images/cheese-sandwich.png b/da-DK/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/da-DK/images/cheese-sandwich.png differ diff --git a/da-DK/images/click-flag-annotated.png b/da-DK/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/da-DK/images/click-flag-annotated.png differ diff --git a/da-DK/images/click-flag.png b/da-DK/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/da-DK/images/click-flag.png differ diff --git a/da-DK/images/code-new-blocks.png b/da-DK/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/da-DK/images/code-new-blocks.png differ diff --git a/da-DK/images/code-with-confidence.png b/da-DK/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/da-DK/images/code-with-confidence.png differ diff --git a/da-DK/images/create-project-annotated.png b/da-DK/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/da-DK/images/create-project-annotated.png differ diff --git a/da-DK/images/create-project.png b/da-DK/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/da-DK/images/create-project.png differ diff --git a/da-DK/images/empty-buckets.png b/da-DK/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/da-DK/images/empty-buckets.png differ diff --git a/da-DK/images/full-buckets.png b/da-DK/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/da-DK/images/full-buckets.png differ diff --git a/da-DK/images/open-scratch-3-annotated.png b/da-DK/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/da-DK/images/open-scratch-3-annotated.png differ diff --git a/da-DK/images/open-scratch-3.png b/da-DK/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/da-DK/images/open-scratch-3.png differ diff --git a/da-DK/images/project-make-annotated.png b/da-DK/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/da-DK/images/project-make-annotated.png differ diff --git a/da-DK/images/project-make.png b/da-DK/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/da-DK/images/project-make.png differ diff --git a/da-DK/images/project-templates-annotated.png b/da-DK/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/da-DK/images/project-templates-annotated.png differ diff --git a/da-DK/images/project-templates.png b/da-DK/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/da-DK/images/project-templates.png differ diff --git a/da-DK/images/project-train-annotated.png b/da-DK/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/da-DK/images/project-train-annotated.png differ diff --git a/da-DK/images/projects-list-annotated.png b/da-DK/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/da-DK/images/projects-list-annotated.png differ diff --git a/da-DK/images/projects-list.png b/da-DK/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/da-DK/images/projects-list.png differ diff --git a/da-DK/images/scratch-template-annotated.png b/da-DK/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/da-DK/images/scratch-template-annotated.png differ diff --git a/da-DK/images/scratch-template.png b/da-DK/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/da-DK/images/scratch-template.png differ diff --git a/da-DK/images/smart-classroom.gif b/da-DK/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/da-DK/images/smart-classroom.gif differ diff --git a/da-DK/images/test-new-model-annotated.png b/da-DK/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/da-DK/images/test-new-model-annotated.png differ diff --git a/da-DK/images/test-with-new-blocks-annotated.png b/da-DK/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/da-DK/images/test-with-new-blocks-annotated.png differ diff --git a/da-DK/images/test-with-new-blocks.png b/da-DK/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/da-DK/images/test-with-new-blocks.png differ diff --git a/da-DK/images/train-new-model-annotated.png b/da-DK/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/da-DK/images/train-new-model-annotated.png differ diff --git a/da-DK/images/train-new-model.png b/da-DK/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/da-DK/images/train-new-model.png differ diff --git a/da-DK/images/what-you-will-make.png b/da-DK/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/da-DK/images/what-you-will-make.png differ diff --git a/da-DK/meta.yml b/da-DK/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/da-DK/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/da-DK/step_1.md b/da-DK/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/da-DK/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/da-DK/step_2.md b/da-DK/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/da-DK/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/da-DK/step_3.md b/da-DK/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/da-DK/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/da-DK/step_4.md b/da-DK/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/da-DK/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/da-DK/step_5.md b/da-DK/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/da-DK/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
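+
+In the next two steps, you will swap the rules you wrote earlier for your trained model, and then make use of the model's confidence score. The finished scripts appear there as screenshots; as a rough text-only sketch (assuming the `recognise text … (label)` block described in the next step, a matching `recognise text … (confidence)` reporter block, the label names shown in the training screenshots, and the 70% threshold that the challenge step later asks you to experiment with), the final script might look something like this:
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (confidence)) > [70]> then
+if <(recognise text (answer) (label)) = [fan_on]> then
+broadcast (turn-fan-on v)
+end
+if <(recognise text (answer) (label)) = [fan_off]> then
+broadcast (turn-fan-off v)
+end
+if <(recognise text (answer) (label)) = [lamp_on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [lamp_off]> then
+broadcast (turn-lamp-off v)
+end
+else
+say [Sorry I’m not sure what you mean]
+end
+end
+```
+
+If the blocks or label names in your own project look different, follow what you see in Scratch and in the Train tool rather than this sketch.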
\ No newline at end of file diff --git a/da-DK/step_6.md b/da-DK/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/da-DK/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![Annotation pointing at the Open in Scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New Scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/da-DK/step_7.md b/da-DK/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/da-DK/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn what confidence scores mean and how to use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch.
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into the Scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands and the higher its confidence scores become. \ No newline at end of file diff --git a/da-DK/step_8.md b/da-DK/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/da-DK/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling it? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the right confidence threshold for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different values until you find one that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants in the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that interests you and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/da-DK/step_9.md b/da-DK/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/da-DK/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other Scratch machine learning projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/de-DE/images/banner.png b/de-DE/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/de-DE/images/banner.png differ diff --git a/de-DE/images/cheese-sandwich-annotated.png b/de-DE/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/de-DE/images/cheese-sandwich-annotated.png differ diff --git a/de-DE/images/cheese-sandwich.png b/de-DE/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/de-DE/images/cheese-sandwich.png differ diff --git a/de-DE/images/click-flag-annotated.png b/de-DE/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/de-DE/images/click-flag-annotated.png differ diff --git a/de-DE/images/click-flag.png b/de-DE/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/de-DE/images/click-flag.png differ diff --git a/de-DE/images/code-new-blocks.png b/de-DE/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/de-DE/images/code-new-blocks.png differ diff --git a/de-DE/images/code-with-confidence.png b/de-DE/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/de-DE/images/code-with-confidence.png differ diff --git a/de-DE/images/create-project-annotated.png b/de-DE/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/de-DE/images/create-project-annotated.png differ diff --git a/de-DE/images/create-project.png b/de-DE/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/de-DE/images/create-project.png differ diff --git a/de-DE/images/empty-buckets.png b/de-DE/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/de-DE/images/empty-buckets.png differ diff --git a/de-DE/images/full-buckets.png b/de-DE/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/de-DE/images/full-buckets.png differ diff --git a/de-DE/images/open-scratch-3-annotated.png b/de-DE/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/de-DE/images/open-scratch-3-annotated.png differ diff --git a/de-DE/images/open-scratch-3.png b/de-DE/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/de-DE/images/open-scratch-3.png differ diff --git a/de-DE/images/project-make-annotated.png b/de-DE/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/de-DE/images/project-make-annotated.png differ diff --git a/de-DE/images/project-make.png b/de-DE/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/de-DE/images/project-make.png differ diff --git a/de-DE/images/project-templates-annotated.png b/de-DE/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/de-DE/images/project-templates-annotated.png differ diff --git a/de-DE/images/project-templates.png b/de-DE/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/de-DE/images/project-templates.png differ diff --git a/de-DE/images/project-train-annotated.png b/de-DE/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/de-DE/images/project-train-annotated.png differ diff --git a/de-DE/images/projects-list-annotated.png b/de-DE/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/de-DE/images/projects-list-annotated.png differ diff --git a/de-DE/images/projects-list.png b/de-DE/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/de-DE/images/projects-list.png differ diff --git a/de-DE/images/scratch-template-annotated.png b/de-DE/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/de-DE/images/scratch-template-annotated.png differ diff --git a/de-DE/images/scratch-template.png b/de-DE/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/de-DE/images/scratch-template.png differ diff --git a/de-DE/images/smart-classroom.gif b/de-DE/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/de-DE/images/smart-classroom.gif differ diff --git a/de-DE/images/test-new-model-annotated.png b/de-DE/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/de-DE/images/test-new-model-annotated.png differ diff --git a/de-DE/images/test-with-new-blocks-annotated.png b/de-DE/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/de-DE/images/test-with-new-blocks-annotated.png differ diff --git a/de-DE/images/test-with-new-blocks.png b/de-DE/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/de-DE/images/test-with-new-blocks.png differ diff --git a/de-DE/images/train-new-model-annotated.png b/de-DE/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/de-DE/images/train-new-model-annotated.png differ diff --git a/de-DE/images/train-new-model.png b/de-DE/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/de-DE/images/train-new-model.png differ diff --git a/de-DE/images/what-you-will-make.png b/de-DE/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/de-DE/images/what-you-will-make.png differ diff --git a/de-DE/meta.yml b/de-DE/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/de-DE/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/de-DE/step_1.md b/de-DE/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/de-DE/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/de-DE/step_2.md b/de-DE/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/de-DE/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/de-DE/step_3.md b/de-DE/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/de-DE/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/de-DE/step_4.md b/de-DE/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/de-DE/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to add examples with the **Add example** button until you have at least **six** examples in **each** bucket. + +Be imaginative! Try to think of lots of different ways to phrase each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 classes named fan_on, fan_off, lamp_on and lamp_off, each filled with example commands](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/de-DE/step_5.md b/de-DE/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/de-DE/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine learning multiple-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**, and check whether they are still recognised correctly. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then click on **Train new machine learning model** again. + +![Annotation pointing to the box for testing the machine learning model](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**.
\ No newline at end of file diff --git a/de-DE/step_6.md b/de-DE/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/de-DE/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![Annotation pointing at the Open in Scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. It takes a piece of text and returns one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/de-DE/step_7.md b/de-DE/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/de-DE/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn what confidence scores mean and how you can use them. + +\--- task \--- ++ Leave Scratch open, because you will come back to it in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch.
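+ +In the next instruction you will combine the `recognise text … (label)` block from the last step with a confidence check, so that the assistant only reacts when the machine learning model is confident enough about what you typed. The exact blocks to build are shown in the image that follows; the rough sketch below is only a guide, and the `recognise text … (confidence)` reporter, the block layout, and the `say` response used here are assumptions based on this project's description (the 70% threshold and the “Sorry I’m not sure what you mean” reply come from the challenge step). + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(recognise text (answer) (confidence)) > (70)> then // only react when the model is at least 70% confident +if <(recognise text (answer) (label)) = [fan_on]> then +broadcast (turn-fan-on v) +end +if <(recognise text (answer) (label)) = [fan_off]> then +broadcast (turn-fan-off v) +end +if <(recognise text (answer) (label)) = [lamp_on]> then +broadcast (turn-lamp-on v) +end +if <(recognise text (answer) (label)) = [lamp_off]> then +broadcast (turn-lamp-off v) +end +else +say [Sorry I'm not sure what you mean] +end +end +``` + +To keep the sketch short, the label block is used more than once; in your own script you could store its result in a variable first, or simply copy the blocks from the image in the next instruction.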
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the higher its confidence scores will be for the commands it does understand. \ No newline at end of file diff --git a/de-DE/step_8.md b/de-DE/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/de-DE/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling it? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/de-DE/step_9.md b/de-DE/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/de-DE/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/el-GR/images/banner.png b/el-GR/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/el-GR/images/banner.png differ diff --git a/el-GR/images/cheese-sandwich-annotated.png b/el-GR/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/el-GR/images/cheese-sandwich-annotated.png differ diff --git a/el-GR/images/cheese-sandwich.png b/el-GR/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/el-GR/images/cheese-sandwich.png differ diff --git a/el-GR/images/click-flag-annotated.png b/el-GR/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/el-GR/images/click-flag-annotated.png differ diff --git a/el-GR/images/click-flag.png b/el-GR/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/el-GR/images/click-flag.png differ diff --git a/el-GR/images/code-new-blocks.png b/el-GR/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/el-GR/images/code-new-blocks.png differ diff --git a/el-GR/images/code-with-confidence.png b/el-GR/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/el-GR/images/code-with-confidence.png differ diff --git a/el-GR/images/create-project-annotated.png b/el-GR/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/el-GR/images/create-project-annotated.png differ diff --git a/el-GR/images/create-project.png b/el-GR/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/el-GR/images/create-project.png differ diff --git a/el-GR/images/empty-buckets.png b/el-GR/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/el-GR/images/empty-buckets.png differ diff --git a/el-GR/images/full-buckets.png b/el-GR/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/el-GR/images/full-buckets.png differ diff --git a/el-GR/images/open-scratch-3-annotated.png b/el-GR/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/el-GR/images/open-scratch-3-annotated.png differ diff --git a/el-GR/images/open-scratch-3.png b/el-GR/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/el-GR/images/open-scratch-3.png differ diff --git a/el-GR/images/project-make-annotated.png b/el-GR/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/el-GR/images/project-make-annotated.png differ diff --git a/el-GR/images/project-make.png b/el-GR/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/el-GR/images/project-make.png differ diff --git a/el-GR/images/project-templates-annotated.png b/el-GR/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/el-GR/images/project-templates-annotated.png differ diff --git a/el-GR/images/project-templates.png b/el-GR/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/el-GR/images/project-templates.png differ diff --git a/el-GR/images/project-train-annotated.png b/el-GR/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/el-GR/images/project-train-annotated.png differ diff --git a/el-GR/images/projects-list-annotated.png b/el-GR/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/el-GR/images/projects-list-annotated.png differ diff --git a/el-GR/images/projects-list.png b/el-GR/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/el-GR/images/projects-list.png differ diff --git a/el-GR/images/scratch-template-annotated.png b/el-GR/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/el-GR/images/scratch-template-annotated.png differ diff --git a/el-GR/images/scratch-template.png b/el-GR/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/el-GR/images/scratch-template.png differ diff --git a/el-GR/images/smart-classroom.gif b/el-GR/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/el-GR/images/smart-classroom.gif differ diff --git a/el-GR/images/test-new-model-annotated.png b/el-GR/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/el-GR/images/test-new-model-annotated.png differ diff --git a/el-GR/images/test-with-new-blocks-annotated.png b/el-GR/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/el-GR/images/test-with-new-blocks-annotated.png differ diff --git a/el-GR/images/test-with-new-blocks.png b/el-GR/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/el-GR/images/test-with-new-blocks.png differ diff --git a/el-GR/images/train-new-model-annotated.png b/el-GR/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/el-GR/images/train-new-model-annotated.png differ diff --git a/el-GR/images/train-new-model.png b/el-GR/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/el-GR/images/train-new-model.png differ diff --git a/el-GR/images/what-you-will-make.png b/el-GR/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/el-GR/images/what-you-will-make.png differ diff --git a/el-GR/meta.yml b/el-GR/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/el-GR/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/el-GR/step_1.md b/el-GR/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/el-GR/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/el-GR/step_2.md b/el-GR/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/el-GR/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/el-GR/step_3.md b/el-GR/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/el-GR/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/el-GR/step_4.md b/el-GR/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/el-GR/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to add examples with the **Add example** button until you have at least **six** examples in **each** bucket. + +Be imaginative! Try to think of lots of different ways to phrase each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 classes named fan_on, fan_off, lamp_on and lamp_off, each filled with example commands](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/el-GR/step_5.md b/el-GR/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/el-GR/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine learning multiple-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**, and check whether they are still recognised correctly. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then click on **Train new machine learning model** again. + +![Annotation pointing to the box for testing the machine learning model](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**.
\ No newline at end of file diff --git a/el-GR/step_6.md b/el-GR/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/el-GR/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![Annotation pointing at the Open in Scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. It takes a piece of text and returns one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/el-GR/step_7.md b/el-GR/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/el-GR/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn what confidence scores mean and how you can use them. + +\--- task \--- ++ Leave Scratch open, because you will come back to it in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch.
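+ +In the next instruction you will combine the `recognise text … (label)` block from the last step with a confidence check, so that the assistant only reacts when the machine learning model is confident enough about what you typed. The exact blocks to build are shown in the image that follows; the rough sketch below is only a guide, and the `recognise text … (confidence)` reporter, the block layout, and the `say` response used here are assumptions based on this project's description (the 70% threshold and the “Sorry I’m not sure what you mean” reply come from the challenge step). + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(recognise text (answer) (confidence)) > (70)> then // only react when the model is at least 70% confident +if <(recognise text (answer) (label)) = [fan_on]> then +broadcast (turn-fan-on v) +end +if <(recognise text (answer) (label)) = [fan_off]> then +broadcast (turn-fan-off v) +end +if <(recognise text (answer) (label)) = [lamp_on]> then +broadcast (turn-lamp-on v) +end +if <(recognise text (answer) (label)) = [lamp_off]> then +broadcast (turn-lamp-off v) +end +else +say [Sorry I'm not sure what you mean] +end +end +``` + +To keep the sketch short, the label block is used more than once; in your own script you could store its result in a variable first, or simply copy the blocks from the image in the next instruction.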
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the higher its confidence scores will be for the commands it does understand. \ No newline at end of file diff --git a/el-GR/step_8.md b/el-GR/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/el-GR/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling it? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/el-GR/step_9.md b/el-GR/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/el-GR/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/en-US/images/banner.png b/en-US/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/en-US/images/banner.png differ diff --git a/en-US/images/cheese-sandwich-annotated.png b/en-US/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/en-US/images/cheese-sandwich-annotated.png differ diff --git a/en-US/images/cheese-sandwich.png b/en-US/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/en-US/images/cheese-sandwich.png differ diff --git a/en-US/images/click-flag-annotated.png b/en-US/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/en-US/images/click-flag-annotated.png differ diff --git a/en-US/images/click-flag.png b/en-US/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/en-US/images/click-flag.png differ diff --git a/en-US/images/code-new-blocks.png b/en-US/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/en-US/images/code-new-blocks.png differ diff --git a/en-US/images/code-with-confidence.png b/en-US/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/en-US/images/code-with-confidence.png differ diff --git a/en-US/images/create-project-annotated.png b/en-US/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/en-US/images/create-project-annotated.png differ diff --git a/en-US/images/create-project.png b/en-US/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/en-US/images/create-project.png differ diff --git a/en-US/images/empty-buckets.png b/en-US/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/en-US/images/empty-buckets.png differ diff --git a/en-US/images/full-buckets.png b/en-US/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/en-US/images/full-buckets.png differ diff --git a/en-US/images/open-scratch-3-annotated.png b/en-US/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/en-US/images/open-scratch-3-annotated.png differ diff --git a/en-US/images/open-scratch-3.png b/en-US/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/en-US/images/open-scratch-3.png differ diff --git a/en-US/images/project-make-annotated.png b/en-US/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/en-US/images/project-make-annotated.png differ diff --git a/en-US/images/project-make.png b/en-US/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/en-US/images/project-make.png differ diff --git a/en-US/images/project-templates-annotated.png b/en-US/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/en-US/images/project-templates-annotated.png differ diff --git a/en-US/images/project-templates.png b/en-US/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/en-US/images/project-templates.png differ diff --git a/en-US/images/project-train-annotated.png b/en-US/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/en-US/images/project-train-annotated.png differ diff --git a/en-US/images/projects-list-annotated.png b/en-US/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/en-US/images/projects-list-annotated.png differ diff --git a/en-US/images/projects-list.png b/en-US/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/en-US/images/projects-list.png differ diff --git a/en-US/images/scratch-template-annotated.png b/en-US/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/en-US/images/scratch-template-annotated.png differ diff --git a/en-US/images/scratch-template.png b/en-US/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/en-US/images/scratch-template.png differ diff --git a/en-US/images/smart-classroom.gif b/en-US/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/en-US/images/smart-classroom.gif differ diff --git a/en-US/images/test-new-model-annotated.png b/en-US/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/en-US/images/test-new-model-annotated.png differ diff --git a/en-US/images/test-with-new-blocks-annotated.png b/en-US/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/en-US/images/test-with-new-blocks-annotated.png differ diff --git a/en-US/images/test-with-new-blocks.png b/en-US/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/en-US/images/test-with-new-blocks.png differ diff --git a/en-US/images/train-new-model-annotated.png b/en-US/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/en-US/images/train-new-model-annotated.png differ diff --git a/en-US/images/train-new-model.png b/en-US/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/en-US/images/train-new-model.png differ diff --git a/en-US/images/what-you-will-make.png b/en-US/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/en-US/images/what-you-will-make.png differ diff --git a/en-US/meta.yml b/en-US/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/en-US/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/en-US/step_1.md b/en-US/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/en-US/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/en-US/step_2.md b/en-US/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/en-US/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/en-US/step_3.md b/en-US/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/en-US/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/en-US/step_4.md b/en-US/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/en-US/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to add examples with the **Add example** button until you have at least **six** examples in **each** bucket. + +Be imaginative! Try to think of lots of different ways to phrase each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 classes named fan_on, fan_off, lamp_on and lamp_off, each filled with example commands](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/en-US/step_5.md b/en-US/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/en-US/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine learning multiple-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**, and check whether they are still recognised correctly. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then click on **Train new machine learning model** again. + +![Annotation pointing to the box for testing the machine learning model](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**.
\ No newline at end of file diff --git a/en-US/step_6.md b/en-US/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/en-US/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to use your machine learning model instead of the rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![Annotation pointing at the Open in Scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New Scratch code including the new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing the new code from the previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/en-US/step_7.md b/en-US/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/en-US/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn what confidence scores mean and how to use them. + +\--- task \--- ++ Leave Scratch open, because you will come back to it in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into the Scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the higher its confidence scores become. \ No newline at end of file diff --git a/en-US/step_8.md b/en-US/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/en-US/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to the fan and the lamp, can you add another item and train your smart classroom assistant to understand the commands for controlling it? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the right confidence threshold for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this assistant? + +\--- /task \--- + +\--- /challenge \--- diff --git a/en-US/step_9.md b/en-US/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/en-US/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other projects that use machine learning with Scratch. 
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/es-ES/images/banner.png b/es-ES/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/es-ES/images/banner.png differ diff --git a/es-ES/images/cheese-sandwich-annotated.png b/es-ES/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/es-ES/images/cheese-sandwich-annotated.png differ diff --git a/es-ES/images/cheese-sandwich.png b/es-ES/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/es-ES/images/cheese-sandwich.png differ diff --git a/es-ES/images/click-flag-annotated.png b/es-ES/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/es-ES/images/click-flag-annotated.png differ diff --git a/es-ES/images/click-flag.png b/es-ES/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/es-ES/images/click-flag.png differ diff --git a/es-ES/images/code-new-blocks.png b/es-ES/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/es-ES/images/code-new-blocks.png differ diff --git a/es-ES/images/code-with-confidence.png b/es-ES/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/es-ES/images/code-with-confidence.png differ diff --git a/es-ES/images/create-project-annotated.png b/es-ES/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/es-ES/images/create-project-annotated.png differ diff --git a/es-ES/images/create-project.png b/es-ES/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/es-ES/images/create-project.png differ diff --git a/es-ES/images/empty-buckets.png b/es-ES/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/es-ES/images/empty-buckets.png differ diff --git a/es-ES/images/full-buckets.png b/es-ES/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/es-ES/images/full-buckets.png differ diff --git a/es-ES/images/open-scratch-3-annotated.png b/es-ES/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/es-ES/images/open-scratch-3-annotated.png differ diff --git a/es-ES/images/open-scratch-3.png b/es-ES/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/es-ES/images/open-scratch-3.png differ diff --git a/es-ES/images/project-make-annotated.png b/es-ES/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/es-ES/images/project-make-annotated.png differ diff --git a/es-ES/images/project-make.png b/es-ES/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/es-ES/images/project-make.png differ diff --git a/es-ES/images/project-templates-annotated.png b/es-ES/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/es-ES/images/project-templates-annotated.png differ diff --git a/es-ES/images/project-templates.png b/es-ES/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/es-ES/images/project-templates.png differ diff --git a/es-ES/images/project-train-annotated.png b/es-ES/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/es-ES/images/project-train-annotated.png differ diff --git a/es-ES/images/projects-list-annotated.png b/es-ES/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/es-ES/images/projects-list-annotated.png differ diff --git a/es-ES/images/projects-list.png b/es-ES/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/es-ES/images/projects-list.png differ diff --git a/es-ES/images/scratch-template-annotated.png b/es-ES/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/es-ES/images/scratch-template-annotated.png differ diff --git a/es-ES/images/scratch-template.png b/es-ES/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/es-ES/images/scratch-template.png differ diff --git a/es-ES/images/smart-classroom.gif b/es-ES/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/es-ES/images/smart-classroom.gif differ diff --git a/es-ES/images/test-new-model-annotated.png b/es-ES/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/es-ES/images/test-new-model-annotated.png differ diff --git a/es-ES/images/test-with-new-blocks-annotated.png b/es-ES/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/es-ES/images/test-with-new-blocks-annotated.png differ diff --git a/es-ES/images/test-with-new-blocks.png b/es-ES/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/es-ES/images/test-with-new-blocks.png differ diff --git a/es-ES/images/train-new-model-annotated.png b/es-ES/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/es-ES/images/train-new-model-annotated.png differ diff --git a/es-ES/images/train-new-model.png b/es-ES/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/es-ES/images/train-new-model.png differ diff --git a/es-ES/images/what-you-will-make.png b/es-ES/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/es-ES/images/what-you-will-make.png differ diff --git a/es-ES/meta.yml b/es-ES/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/es-ES/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/es-ES/step_1.md b/es-ES/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/es-ES/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/es-ES/step_2.md b/es-ES/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/es-ES/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/es-ES/step_3.md b/es-ES/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/es-ES/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/es-ES/step_4.md b/es-ES/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/es-ES/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/es-ES/step_5.md b/es-ES/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/es-ES/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
\ No newline at end of file diff --git a/es-ES/step_6.md b/es-ES/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/es-ES/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/es-ES/step_7.md b/es-ES/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/es-ES/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/es-ES/step_8.md b/es-ES/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/es-ES/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try our different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and trained the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/es-ES/step_9.md b/es-ES/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/es-ES/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects. 
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/fi-FI/images/banner.png b/fi-FI/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/fi-FI/images/banner.png differ diff --git a/fi-FI/images/cheese-sandwich-annotated.png b/fi-FI/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/fi-FI/images/cheese-sandwich-annotated.png differ diff --git a/fi-FI/images/cheese-sandwich.png b/fi-FI/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/fi-FI/images/cheese-sandwich.png differ diff --git a/fi-FI/images/click-flag-annotated.png b/fi-FI/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/fi-FI/images/click-flag-annotated.png differ diff --git a/fi-FI/images/click-flag.png b/fi-FI/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/fi-FI/images/click-flag.png differ diff --git a/fi-FI/images/code-new-blocks.png b/fi-FI/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/fi-FI/images/code-new-blocks.png differ diff --git a/fi-FI/images/code-with-confidence.png b/fi-FI/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/fi-FI/images/code-with-confidence.png differ diff --git a/fi-FI/images/create-project-annotated.png b/fi-FI/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/fi-FI/images/create-project-annotated.png differ diff --git a/fi-FI/images/create-project.png b/fi-FI/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/fi-FI/images/create-project.png differ diff --git a/fi-FI/images/empty-buckets.png b/fi-FI/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/fi-FI/images/empty-buckets.png differ diff --git a/fi-FI/images/full-buckets.png b/fi-FI/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/fi-FI/images/full-buckets.png differ diff --git a/fi-FI/images/open-scratch-3-annotated.png b/fi-FI/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/fi-FI/images/open-scratch-3-annotated.png differ diff --git a/fi-FI/images/open-scratch-3.png b/fi-FI/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/fi-FI/images/open-scratch-3.png differ diff --git a/fi-FI/images/project-make-annotated.png b/fi-FI/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/fi-FI/images/project-make-annotated.png differ diff --git a/fi-FI/images/project-make.png b/fi-FI/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/fi-FI/images/project-make.png differ diff --git a/fi-FI/images/project-templates-annotated.png b/fi-FI/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/fi-FI/images/project-templates-annotated.png differ diff --git a/fi-FI/images/project-templates.png b/fi-FI/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/fi-FI/images/project-templates.png differ diff --git a/fi-FI/images/project-train-annotated.png b/fi-FI/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/fi-FI/images/project-train-annotated.png differ diff --git a/fi-FI/images/projects-list-annotated.png b/fi-FI/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/fi-FI/images/projects-list-annotated.png differ diff --git a/fi-FI/images/projects-list.png b/fi-FI/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/fi-FI/images/projects-list.png differ diff --git a/fi-FI/images/scratch-template-annotated.png b/fi-FI/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/fi-FI/images/scratch-template-annotated.png differ diff --git a/fi-FI/images/scratch-template.png b/fi-FI/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/fi-FI/images/scratch-template.png differ diff --git a/fi-FI/images/smart-classroom.gif b/fi-FI/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/fi-FI/images/smart-classroom.gif differ diff --git a/fi-FI/images/test-new-model-annotated.png b/fi-FI/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/fi-FI/images/test-new-model-annotated.png differ diff --git a/fi-FI/images/test-with-new-blocks-annotated.png b/fi-FI/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/fi-FI/images/test-with-new-blocks-annotated.png differ diff --git a/fi-FI/images/test-with-new-blocks.png b/fi-FI/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/fi-FI/images/test-with-new-blocks.png differ diff --git a/fi-FI/images/train-new-model-annotated.png b/fi-FI/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/fi-FI/images/train-new-model-annotated.png differ diff --git a/fi-FI/images/train-new-model.png b/fi-FI/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/fi-FI/images/train-new-model.png differ diff --git a/fi-FI/images/what-you-will-make.png b/fi-FI/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/fi-FI/images/what-you-will-make.png differ diff --git a/fi-FI/meta.yml b/fi-FI/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/fi-FI/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/fi-FI/step_1.md b/fi-FI/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/fi-FI/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/fi-FI/step_2.md b/fi-FI/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/fi-FI/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/fi-FI/step_3.md b/fi-FI/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/fi-FI/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/fi-FI/step_4.md b/fi-FI/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/fi-FI/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/fi-FI/step_5.md b/fi-FI/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/fi-FI/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
\ No newline at end of file diff --git a/fi-FI/step_6.md b/fi-FI/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/fi-FI/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/fi-FI/step_7.md b/fi-FI/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/fi-FI/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/fi-FI/step_8.md b/fi-FI/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/fi-FI/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try our different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and trained the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/fi-FI/step_9.md b/fi-FI/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/fi-FI/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects. 
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/fr-FR/images/banner.png b/fr-FR/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/fr-FR/images/banner.png differ diff --git a/fr-FR/images/cheese-sandwich-annotated.png b/fr-FR/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/fr-FR/images/cheese-sandwich-annotated.png differ diff --git a/fr-FR/images/cheese-sandwich.png b/fr-FR/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/fr-FR/images/cheese-sandwich.png differ diff --git a/fr-FR/images/classroom-devices.png b/fr-FR/images/classroom-devices.png new file mode 100644 index 0000000..a25ed48 Binary files /dev/null and b/fr-FR/images/classroom-devices.png differ diff --git a/fr-FR/images/click-flag-annotated.png b/fr-FR/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/fr-FR/images/click-flag-annotated.png differ diff --git a/fr-FR/images/click-flag.png b/fr-FR/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/fr-FR/images/click-flag.png differ diff --git a/fr-FR/images/code-new-blocks.png b/fr-FR/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/fr-FR/images/code-new-blocks.png differ diff --git a/fr-FR/images/code-with-confidence.png b/fr-FR/images/code-with-confidence.png new file mode 100644 index 0000000..08fb60b Binary files /dev/null and b/fr-FR/images/code-with-confidence.png differ diff --git a/fr-FR/images/create-project-annotated.png b/fr-FR/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/fr-FR/images/create-project-annotated.png differ diff --git a/fr-FR/images/create-project.png b/fr-FR/images/create-project.png new file mode 100644 index 0000000..7ce2672 Binary files /dev/null and b/fr-FR/images/create-project.png differ diff --git a/fr-FR/images/empty-buckets.png b/fr-FR/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/fr-FR/images/empty-buckets.png differ diff --git a/fr-FR/images/fan-on-and-off.png b/fr-FR/images/fan-on-and-off.png new file mode 100644 index 0000000..76beb24 Binary files /dev/null and b/fr-FR/images/fan-on-and-off.png differ diff --git a/fr-FR/images/full-buckets.png b/fr-FR/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/fr-FR/images/full-buckets.png differ diff --git a/fr-FR/images/new-blocks-menu.png b/fr-FR/images/new-blocks-menu.png new file mode 100644 index 0000000..b1d716a Binary files /dev/null and b/fr-FR/images/new-blocks-menu.png differ diff --git a/fr-FR/images/open-scratch-3-annotated.png b/fr-FR/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/fr-FR/images/open-scratch-3-annotated.png differ diff --git a/fr-FR/images/open-scratch-3.png b/fr-FR/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/fr-FR/images/open-scratch-3.png differ diff --git a/fr-FR/images/play-music.png b/fr-FR/images/play-music.png new file mode 100644 index 0000000..852902d Binary files /dev/null and b/fr-FR/images/play-music.png differ diff --git 
a/fr-FR/images/project-make-annotated.png b/fr-FR/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/fr-FR/images/project-make-annotated.png differ diff --git a/fr-FR/images/project-make.png b/fr-FR/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/fr-FR/images/project-make.png differ diff --git a/fr-FR/images/project-templates-annotated.png b/fr-FR/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/fr-FR/images/project-templates-annotated.png differ diff --git a/fr-FR/images/project-templates.png b/fr-FR/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and b/fr-FR/images/project-templates.png differ diff --git a/fr-FR/images/project-train-annotated.png b/fr-FR/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/fr-FR/images/project-train-annotated.png differ diff --git a/fr-FR/images/project-train.png b/fr-FR/images/project-train.png new file mode 100644 index 0000000..3a3bdb9 Binary files /dev/null and b/fr-FR/images/project-train.png differ diff --git a/fr-FR/images/projects-list-annotated.png b/fr-FR/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/fr-FR/images/projects-list-annotated.png differ diff --git a/fr-FR/images/projects-list.png b/fr-FR/images/projects-list.png new file mode 100644 index 0000000..1721755 Binary files /dev/null and b/fr-FR/images/projects-list.png differ diff --git a/fr-FR/images/save-to-computer.png b/fr-FR/images/save-to-computer.png new file mode 100644 index 0000000..3d67093 Binary files /dev/null and b/fr-FR/images/save-to-computer.png differ diff --git a/fr-FR/images/scratch-template-annotated.png b/fr-FR/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/fr-FR/images/scratch-template-annotated.png differ diff --git a/fr-FR/images/scratch-template.png b/fr-FR/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/fr-FR/images/scratch-template.png differ diff --git a/fr-FR/images/smart-classroom.gif b/fr-FR/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/fr-FR/images/smart-classroom.gif differ diff --git a/fr-FR/images/smart-classroom.png b/fr-FR/images/smart-classroom.png new file mode 100644 index 0000000..e166aaa Binary files /dev/null and b/fr-FR/images/smart-classroom.png differ diff --git a/fr-FR/images/test-model.png b/fr-FR/images/test-model.png new file mode 100644 index 0000000..7dad5b5 Binary files /dev/null and b/fr-FR/images/test-model.png differ diff --git a/fr-FR/images/test-new-model-annotated.png b/fr-FR/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/fr-FR/images/test-new-model-annotated.png differ diff --git a/fr-FR/images/test-with-new-blocks-annotated.png b/fr-FR/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/fr-FR/images/test-with-new-blocks-annotated.png differ diff --git a/fr-FR/images/test-with-new-blocks.png b/fr-FR/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/fr-FR/images/test-with-new-blocks.png differ diff --git a/fr-FR/images/train-new-model-annotated.png 
b/fr-FR/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/fr-FR/images/train-new-model-annotated.png differ diff --git a/fr-FR/images/train-new-model.png b/fr-FR/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/fr-FR/images/train-new-model.png differ diff --git a/fr-FR/images/turn-fan-off.png b/fr-FR/images/turn-fan-off.png new file mode 100644 index 0000000..00280bb Binary files /dev/null and b/fr-FR/images/turn-fan-off.png differ diff --git a/fr-FR/images/turn-fan-on.png b/fr-FR/images/turn-fan-on.png new file mode 100644 index 0000000..86d49d4 Binary files /dev/null and b/fr-FR/images/turn-fan-on.png differ diff --git a/fr-FR/images/what-you-will-make.png b/fr-FR/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/fr-FR/images/what-you-will-make.png differ diff --git a/fr-FR/images/whatyouwillmake.gif b/fr-FR/images/whatyouwillmake.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/fr-FR/images/whatyouwillmake.gif differ diff --git a/fr-FR/meta.yml b/fr-FR/meta.yml new file mode 100644 index 0000000..04d5514 --- /dev/null +++ b/fr-FR/meta.yml @@ -0,0 +1,24 @@ +--- +title: Smart assistant +hero_image: images/banner.png +description: Create a virtual assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: '2019-05-09' +steps: +- title: What you will make +- title: Set up the project +- title: Example commands + completion: + - engaged +- title: Train the model +- title: Create the assistant +- title: 'Challenge' + challenge: true +- title: Confidence scores + completion: + - internal +- title: What can you do now? + completion: + - external diff --git a/fr-FR/resources/NEW smart assistant 1.mp4 b/fr-FR/resources/NEW smart assistant 1.mp4 new file mode 100644 index 0000000..51c457d Binary files /dev/null and b/fr-FR/resources/NEW smart assistant 1.mp4 differ diff --git a/fr-FR/resources/NEW smart assistant 1.srt b/fr-FR/resources/NEW smart assistant 1.srt new file mode 100644 index 0000000..25e9bad --- /dev/null +++ b/fr-FR/resources/NEW smart assistant 1.srt @@ -0,0 +1,26 @@ +1 +00:00:04,040 --> 00:00:09,880 +Go to rpf.io/ML4K and get started. + +2 +00:00:09,880 --> 00:00:12,640 +Click 'Try it now'. + +3 +00:00:12,640 --> 00:00:16,960 +Add a new project called 'Smart assistant'. + +4 +00:00:16,960 --> 00:00:23,240 +Set it to recognise English text, and  +store the data in your web browser. + +5 +00:00:23,240 --> 00:00:29,000 +Create the project, then  +click on the project's name. + +6 +00:00:29,000 --> 00:00:29,960 +Now, click 'Train'. + diff --git a/fr-FR/resources/NEW smart assistant 2.mp4 b/fr-FR/resources/NEW smart assistant 2.mp4 new file mode 100644 index 0000000..2e90362 Binary files /dev/null and b/fr-FR/resources/NEW smart assistant 2.mp4 differ diff --git a/fr-FR/resources/NEW smart assistant 2.srt b/fr-FR/resources/NEW smart assistant 2.srt new file mode 100644 index 0000000..aec96dd --- /dev/null +++ b/fr-FR/resources/NEW smart assistant 2.srt @@ -0,0 +1,27 @@ +1 +00:00:04,320 --> 00:00:06,080 +Add two new labels + +2 +00:00:06,080 --> 00:00:08,400 +one for fan on, + +3 +00:00:08,400 --> 00:00:12,760 +and one for fan off. + +4 +00:00:12,760 --> 00:00:18,200 +Type an example of a command  +that should turn the fan on. + +5 +00:00:18,200 --> 00:00:23,240 +Then repeat until you have  +eight different examples. 
+ +6 +00:00:23,240 --> 00:00:29,960 +Now you need eight different examples  +of commands to turn the fan off too. + diff --git a/fr-FR/resources/NEW smart assistant 3.mp4 b/fr-FR/resources/NEW smart assistant 3.mp4 new file mode 100644 index 0000000..2fdde5f Binary files /dev/null and b/fr-FR/resources/NEW smart assistant 3.mp4 differ diff --git a/fr-FR/resources/NEW smart assistant 3.srt b/fr-FR/resources/NEW smart assistant 3.srt new file mode 100644 index 0000000..9870fbf --- /dev/null +++ b/fr-FR/resources/NEW smart assistant 3.srt @@ -0,0 +1,19 @@ +1 +00:00:03,600 --> 00:00:10,200 +Click 'Back', then click 'Learn & Test'. + +2 +00:00:10,200 --> 00:00:14,720 +Click the button to start training  +your model - it might take a while. + +3 +00:00:14,720 --> 00:00:18,120 +When it is done, type in a  +command to test the model. + +4 +00:00:18,120 --> 00:00:29,960 +Check that your command gives  +the result you were expecting. + diff --git a/fr-FR/resources/NEW smart assistant 4.mp4 b/fr-FR/resources/NEW smart assistant 4.mp4 new file mode 100644 index 0000000..a295711 Binary files /dev/null and b/fr-FR/resources/NEW smart assistant 4.mp4 differ diff --git a/fr-FR/resources/NEW smart assistant 4.srt b/fr-FR/resources/NEW smart assistant 4.srt new file mode 100644 index 0000000..a68f19a --- /dev/null +++ b/fr-FR/resources/NEW smart assistant 4.srt @@ -0,0 +1,28 @@ +1 +00:00:03,800 --> 00:00:12,600 +Now click 'Back' again, then click  +'Make' to use the model in Scratch. + +2 +00:00:12,600 --> 00:00:20,160 +Go to 'Project templates' and select  +the 'Smart Classroom' template. + +3 +00:00:20,160 --> 00:00:29,960 +Now, drag the code blocks to ask for  +a command and turn the fan on or off. + +4 +00:00:38,160 --> 00:00:48,800 +You can right click and select  +'Duplicate' to save time. + +5 +00:00:48,800 --> 00:00:54,240 +Now type in a command to control the fan. + +6 +00:00:54,240 --> 00:00:59,920 +Type another command to turn it off. + diff --git a/fr-FR/resources/NEW smart assistant 5.mp4 b/fr-FR/resources/NEW smart assistant 5.mp4 new file mode 100644 index 0000000..945ac64 Binary files /dev/null and b/fr-FR/resources/NEW smart assistant 5.mp4 differ diff --git a/fr-FR/resources/NEW smart assistant 5.srt b/fr-FR/resources/NEW smart assistant 5.srt new file mode 100644 index 0000000..f1c1642 --- /dev/null +++ b/fr-FR/resources/NEW smart assistant 5.srt @@ -0,0 +1,33 @@ +1 +00:00:04,120 --> 00:00:07,480 +Go back to the 'Learn & Test' page. + +2 +00:00:07,480 --> 00:00:11,440 +Type something that is not  +related to lamps or fans. + +3 +00:00:11,440 --> 00:00:15,240 +The model should show a low  +confidence about its prediction. + +4 +00:00:15,240 --> 00:00:19,520 +Go back to Scratch, and move all of  +your 'if' statements to one side. + +5 +00:00:19,520 --> 00:00:26,880 +Add an 'if/else' so that your code can  +check the model's confidence level. + +6 +00:00:26,880 --> 00:00:36,640 +The light or fan should only be changed  +if the confidence level is over 70. + +7 +00:00:36,640 --> 00:00:46,640 +Try it out! + diff --git a/fr-FR/resources/readme.txt b/fr-FR/resources/readme.txt new file mode 100644 index 0000000..39ad83b --- /dev/null +++ b/fr-FR/resources/readme.txt @@ -0,0 +1 @@ +Pour regarder une vidéo avec des sous-titres sur VLC (videolan.org), assure-toi que le fichier vidéo et le fichier de sous-titres sont dans le même dossier et ont exactement le même nom (par exemple, video.mp4 et video.srt). Ouvre la vidéo dans VLC, les sous-titres seront chargés automatiquement. 
Si les sous-titres n'apparaissent pas, fais un clic droit sur l'écran vidéo, va dans **Sous-titres**, puis **Ajouter un fichier de sous-titres**, et sélectionne le bon fichier .srt. Profite du visionnage avec les sous-titres ! \ No newline at end of file diff --git a/fr-FR/step_1.md b/fr-FR/step_1.md new file mode 100644 index 0000000..28b53d0 --- /dev/null +++ b/fr-FR/step_1.md @@ -0,0 +1,33 @@ +## What you will make + +Create a smart virtual assistant that reacts to your commands. + +![A Scratch project with a fan and a light and a box to type in](images/whatyouwillmake.gif) + +\--- collapse --- +--- +title: Where are my commands stored? +--- + +- This project uses a technology called 'machine learning'. Machine learning systems are trained using a large amount of data. +- This project does not require you to create an account or log in. For this project, the examples you use to make the model are only stored temporarily in your browser (only on your machine). + +\--- /collapse --- + +\--- collapse --- +--- +title: No YouTube? Download the videos! +--- + +You can [download all the videos for this project](https://rpf.io/p/en/smart-assistant-go){:target="_blank"}. + +\--- /collapse --- + +\--- collapse --- +--- +title: License +--- + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. + +\--- /collapse --- \ No newline at end of file diff --git a/fr-FR/step_2.md b/fr-FR/step_2.md new file mode 100644 index 0000000..f50a1e4 --- /dev/null +++ b/fr-FR/step_2.md @@ -0,0 +1,38 @@ +## Set up the project + + +
+ +
+ + +\--- task --- + +- Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + +- Click on **Get started**. + +- Click on **Try it now**. + +\--- /task --- + +\--- task --- + +- Click on **Projects** in the menu bar at the top. + +- Click on the **+ Add a new project** button. + +- Name your project `Smart assistant` and set it to learn to recognise **text**, and store data **in your web browser**. Then click on **Create**. + ![Creating a project](images/create-project.png) + +- You should now see 'Smart assistant' in the projects list. Click on the project. + ![Project list with smart assistant listed](images/projects-list.png) + +\--- /task --- + +\--- task --- + +- Click on the **Train** button. + ![Project main menu with arrow pointing to Train button](images/project-train.png) + +\--- /task --- diff --git a/fr-FR/step_3.md b/fr-FR/step_3.md new file mode 100644 index 0000000..f0e0183 --- /dev/null +++ b/fr-FR/step_3.md @@ -0,0 +1,45 @@ +## Example commands + + +
+ +
+ + +The room has two devices: a fan and a light. + +![A fan and a light](images/classroom-devices.png) + +Your assistant needs some examples of the phrases you might say when you want each device to be turned on or off. For example, to **turn the fan on**, you might say: + +- "Turn on the fan" +- "Start the fan" +- "Please could you switch on the fan" +- "I'm too hot" +- "It's hot in here" + +\--- task --- + +- Click on **+ Add new label** on the top right and add the label “fan on”. + +\--- /task --- + +\--- task --- + +- Click on **Add example** and type in `Turn on the fan`. + +\--- /task --- + +\--- task --- + +- Continue to click on the **Add example** button and add different ways of asking for the fan to be turned on until you have eight different ways of asking. + +\--- /task --- + +\--- task --- + +- Click on **Add new label**, but this time create the label "fan off". Add eight different examples of ways you could ask for the fan to be turned off. + +\--- /task --- + +![Fan on and off categories with eight examples of commands in each](images/fan-on-and-off.png) diff --git a/fr-FR/step_4.md b/fr-FR/step_4.md new file mode 100644 index 0000000..ffc4547 --- /dev/null +++ b/fr-FR/step_4.md @@ -0,0 +1,40 @@ +## Train the model + + +
+ +
+ + +Now that you have some example data, you can train the machine learning model to label a command as either 'fan on' or 'fan off' based on your examples. + +\--- task --- + +- Click on the **< Back to project** link, then click on **Learn & Test**. + +\--- /task --- + +\--- task --- + +- Click on the **Train new machine learning model** button. + +\--- /task --- + +Wait for the training to complete — this might take a minute or two. Once the training has completed, a test box appears. + +\--- task --- + +- Type in `put the fan on` and check that the model labels this input as 'fan on'. + ![Type in put the fan on to see if it is recognised](images/test-model.png) + +\--- /task --- + +\--- task --- + +- Try typing in some other commands for turning the fan on and off, and check that they are given the label you expect. + +\--- /task --- + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + + diff --git a/fr-FR/step_5.md b/fr-FR/step_5.md new file mode 100644 index 0000000..35d90e3 --- /dev/null +++ b/fr-FR/step_5.md @@ -0,0 +1,59 @@ +## Create the assistant + + +
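By the end of this step, the script on your **Classroom** sprite will look roughly like the sketch below. This is only a preview written out in block text, based on the code pictures later in this step: the `recognise text` blocks come from your own trained project, and the exact wording of the blocks and of the labels ('fan on' and 'fan off') may look slightly different on your screen.

```blocks3
when green flag clicked
forever
ask [Enter your command] and wait
if <(recognise text (answer) (label v)) = [fan on]> then
broadcast (turn-fan-on v)
end
if <(recognise text (answer) (label v)) = [fan off]> then
broadcast (turn-fan-off v)
end
end
```

Don't worry if this looks unfamiliar: the tasks below build it up one block at a time.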
+ +
+ + +Now that your model can distinguish between commands, you can use it in a Scratch program to create your smart assistant. + +\--- task --- + +- Click on the **< Back to project** link. + +- Click on **Make**. + +- Click on **Scratch 3**. + +- Click on **Open in Scratch 3**. + +\--- /task --- + +\--- task --- + +- Click on **Project templates** at the top and select the 'Smart classroom' project to load the fan and light sprites. This project also contains pre-made yellow `broadcast` blocks, which can be found under **Events**. + +![Smart classroom project is selected in the Scratch templates](images/smart-classroom.png) + +\--- /task --- + +Machine Learning for Kids has added some special blocks to Scratch to allow you to use the model you just trained. Find them at the bottom of the blocks list. + +![New 'smart assistant' blocks shown in the menu underneath Images](images/new-blocks-menu.png) + +\--- task --- + +- Make sure you have the **Classroom** sprite selected, then click on the **Code** tab and add this code: + +![New scratch code: when flag clicked, forever, ask 'enter your command' and wait. If recognise text (answer) label = fan on, then broadcast turn-fan-on](images/turn-fan-on.png) + +\--- /task --- + +\--- task --- + +- Right-click on the `if` block and select **Duplicate** to add a copy of the whole block of code, and put it directly underneath the first `if`. + +- Change the second copy of the block so that it recognises the text for turning the fan **off**, and broadcasts **turn-fan-off**. + +![New scratch code: If recognise text (answer) label = fan off, then broadcast turn-fan-off](images/turn-fan-off.png) + +\--- /task --- + +\--- task --- + +- Click the **green flag** and type in a command to turn the fan on or off. Check that it has the result you expected. + +- Make sure you test that the assistant performs the correct action **even for commands that you didn’t include as examples**. + +\--- /task --- diff --git a/fr-FR/step_6.md b/fr-FR/step_6.md new file mode 100644 index 0000000..10a58e5 --- /dev/null +++ b/fr-FR/step_6.md @@ -0,0 +1,65 @@ +## Challenge + +\--- challenge --- + +Follow the same steps as before to allow the assistant to also control the light. + +\--- task --- + +- Save a copy of your Scratch project to your computer so that you can easily reload it later to use with your new model. + +![Click on File and then Save to computer](images/save-to-computer.png) + +\--- /task --- + +\--- task --- + +- Go back to your model (**Back to project** > **Train**) and add two more labels: `light_on` and `light_off`. + +\--- /task --- + +\--- task --- + +- Add eight examples of commands you might use to turn the light on. + +\--- /task --- + +\--- task --- + +- Add eight examples of commands you might use to turn the light off. + +\--- /task --- + +\--- task --- + +- Re-train your model (**Back to project** > **Learn and Test**) so that it can also recognise commands for turning the light on and off. + +\--- /task --- + +\--- task --- + +- Load your new model into Scratch (**Make** > **Scratch 3** > **Open in Scratch 3**). + +- In Scratch, reload the code you saved earlier (**File** > **Load from my computer**). + +- Add two more `if` blocks to your program so that you can type commands to control the light. + +\--- collapse --- +--- +title: I can't see the blocks for light_on / light_off +--- + +If you have trained a new model, you will need to close Scratch and then re-open it from the Machine Learning for Kids website for any new blocks to appear.
+ +Click **Make** > **Scratch 3** > **Open in Scratch 3**. + +\--- /collapse --- + +\--- /task --- + +\--- task --- + +- Test whether your program works by typing in commands to turn the light on and off, and checking whether the outcome is as you expected. + +\--- /task --- + +\--- /challenge --- diff --git a/fr-FR/step_7.md b/fr-FR/step_7.md new file mode 100644 index 0000000..2fbec40 --- /dev/null +++ b/fr-FR/step_7.md @@ -0,0 +1,39 @@ +## Confidence scores + + +
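In this step you will wrap the `if` blocks you already have inside a check on how **confident** the model is, so that the assistant only acts when it is reasonably sure. The finished code will look roughly like the sketch below, written out in block text: the key new block (checking whether the confidence is less than 70) is shown in the code picture later in this step, and the surrounding `if ... else` arrangement is one reasonable way to combine it with the `if` blocks you built in the previous steps, so the exact layout in your own project may differ slightly.

```blocks3
when green flag clicked
forever
ask [Enter your command] and wait
if <(recognise text (answer) (confidence v)) < (70)> then
say [Sorry I didn't understand that] for (2) seconds
else
if <(recognise text (answer) (label v)) = [fan on]> then
broadcast (turn-fan-on v)
end
if <(recognise text (answer) (label v)) = [fan off]> then
broadcast (turn-fan-off v)
end
end
end
```

The tasks below explain what the confidence score means and walk you through adding this check.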
+ +
+ + +The model can tell you how **confident** it is about whether it is correct. + +\--- task --- + +- Go back to the **Learn & Test** page in the training tool. + +- Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'play some music'. + +![Result of entering "play some music" is fan on with 36% confidence](images/play-music.png) + +\--- /task --- + +The **confidence score** is the program’s way of telling you how likely it is that it has labelled the command correctly. + +\--- task --- + +- Return to Scratch. + +- Add some new code so that the assistant will tell you it didn't understand the command if the confidence score is less than 70%. + +![New Scratch code: If recognise text (answer) confidence < 70, say 'Sorry I didn't understand that' for 2 seconds](images/code-with-confidence.png) + +\--- /task --- + +\--- task --- + +- Click the **green flag** and test your program to check that your assistant reacts in the right way: + - Type in commands that have nothing to do with the fan or lamp + - Ask for something to be turned on or off + +\--- /task --- diff --git a/fr-FR/step_8.md b/fr-FR/step_8.md new file mode 100644 index 0000000..d4b22e9 --- /dev/null +++ b/fr-FR/step_8.md @@ -0,0 +1,3 @@ +## What can you do now? + +There are lots of other machine learning and AI projects in the [Machine learning with Scratch](https://projects.raspberrypi.org/en/pathways/scratch-machine-learning) pathway. diff --git a/fr-FR/step_9.md b/fr-FR/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/fr-FR/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects. + +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/he-IL/images/banner.png b/he-IL/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/he-IL/images/banner.png differ diff --git a/he-IL/images/cheese-sandwich-annotated.png b/he-IL/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/he-IL/images/cheese-sandwich-annotated.png differ diff --git a/he-IL/images/cheese-sandwich.png b/he-IL/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/he-IL/images/cheese-sandwich.png differ diff --git a/he-IL/images/click-flag-annotated.png b/he-IL/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/he-IL/images/click-flag-annotated.png differ diff --git a/he-IL/images/click-flag.png b/he-IL/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/he-IL/images/click-flag.png differ diff --git a/he-IL/images/code-new-blocks.png b/he-IL/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/he-IL/images/code-new-blocks.png differ diff --git a/he-IL/images/code-with-confidence.png b/he-IL/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/he-IL/images/code-with-confidence.png differ diff --git a/he-IL/images/create-project-annotated.png b/he-IL/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/he-IL/images/create-project-annotated.png differ diff --git a/he-IL/images/create-project.png 
b/he-IL/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/he-IL/images/create-project.png differ diff --git a/he-IL/images/empty-buckets.png b/he-IL/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/he-IL/images/empty-buckets.png differ diff --git a/he-IL/images/full-buckets.png b/he-IL/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/he-IL/images/full-buckets.png differ diff --git a/he-IL/images/open-scratch-3-annotated.png b/he-IL/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/he-IL/images/open-scratch-3-annotated.png differ diff --git a/he-IL/images/open-scratch-3.png b/he-IL/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/he-IL/images/open-scratch-3.png differ diff --git a/he-IL/images/project-make-annotated.png b/he-IL/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/he-IL/images/project-make-annotated.png differ diff --git a/he-IL/images/project-make.png b/he-IL/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/he-IL/images/project-make.png differ diff --git a/he-IL/images/project-templates-annotated.png b/he-IL/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/he-IL/images/project-templates-annotated.png differ diff --git a/he-IL/images/project-templates.png b/he-IL/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and b/he-IL/images/project-templates.png differ diff --git a/he-IL/images/project-train-annotated.png b/he-IL/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/he-IL/images/project-train-annotated.png differ diff --git a/he-IL/images/projects-list-annotated.png b/he-IL/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/he-IL/images/projects-list-annotated.png differ diff --git a/he-IL/images/projects-list.png b/he-IL/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/he-IL/images/projects-list.png differ diff --git a/he-IL/images/scratch-template-annotated.png b/he-IL/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/he-IL/images/scratch-template-annotated.png differ diff --git a/he-IL/images/scratch-template.png b/he-IL/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/he-IL/images/scratch-template.png differ diff --git a/he-IL/images/smart-classroom.gif b/he-IL/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/he-IL/images/smart-classroom.gif differ diff --git a/he-IL/images/test-new-model-annotated.png b/he-IL/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/he-IL/images/test-new-model-annotated.png differ diff --git a/he-IL/images/test-with-new-blocks-annotated.png b/he-IL/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/he-IL/images/test-with-new-blocks-annotated.png differ diff --git a/he-IL/images/test-with-new-blocks.png b/he-IL/images/test-with-new-blocks.png 
new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/he-IL/images/test-with-new-blocks.png differ diff --git a/he-IL/images/train-new-model-annotated.png b/he-IL/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/he-IL/images/train-new-model-annotated.png differ diff --git a/he-IL/images/train-new-model.png b/he-IL/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/he-IL/images/train-new-model.png differ diff --git a/he-IL/images/what-you-will-make.png b/he-IL/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/he-IL/images/what-you-will-make.png differ diff --git a/he-IL/meta.yml b/he-IL/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/he-IL/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? diff --git a/he-IL/step_1.md b/he-IL/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/he-IL/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. 
diff --git a/he-IL/step_2.md b/he-IL/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/he-IL/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. + +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/he-IL/step_3.md b/he-IL/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/he-IL/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. 
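Before you move on, it is worth seeing how quickly the rules-based script grows. Accepting even one extra way of phrasing a single command means adding yet another `if` block to the script above, along these lines (the phrasing shown is just an example; the broadcast is the same one used before):

```blocks3
if <(answer) = [Please turn on the fan]> then
broadcast (turn-fan-on v)
end
```

Every new phrasing for every device needs its own block like this, which is why the list of rules soon becomes unmanageable.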
\ No newline at end of file diff --git a/he-IL/step_4.md b/he-IL/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/he-IL/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. + +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/he-IL/step_5.md b/he-IL/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/he-IL/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. 
+ +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multiple-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to the box for testing the machine learning model](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. \ No newline at end of file diff --git a/he-IL/step_6.md b/he-IL/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/he-IL/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![Annotation pointing at the Open in Scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom assistant uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands.
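For reference, the updated script could look roughly like the sketch below. This is only an approximation written out in block text: the `recognise text … (label)` block comes from the new category that your project adds at the bottom of the blocks list, and your label names may be shown as fan_on, fan_off, lamp_on and lamp_off (or exactly as you typed them when you created the buckets).

```blocks3
when green flag clicked
forever
ask [Enter your command] and wait
if <(recognise text (answer) (label v)) = [fan_on]> then
broadcast (turn-fan-on v)
end
if <(recognise text (answer) (label v)) = [fan_off]> then
broadcast (turn-fan-off v)
end
if <(recognise text (answer) (label v)) = [lamp_on]> then
broadcast (turn-lamp-on v)
end
if <(recognise text (answer) (label v)) = [lamp_off]> then
broadcast (turn-lamp-off v)
end
end
```

Compare this with the rules version you wrote earlier: the structure is the same, but the exact-match comparisons on `answer` have been replaced by the label the model predicts, so any phrasing the model recognises will now work.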
\ No newline at end of file diff --git a/he-IL/step_7.md b/he-IL/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/he-IL/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. + ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/he-IL/step_8.md b/he-IL/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/he-IL/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try our different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. 
Then they collect examples of how the commands might be phrased and trained the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/he-IL/step_9.md b/he-IL/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/he-IL/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects. + +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/hu-HU/images/banner.png b/hu-HU/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/hu-HU/images/banner.png differ diff --git a/hu-HU/images/cheese-sandwich-annotated.png b/hu-HU/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/hu-HU/images/cheese-sandwich-annotated.png differ diff --git a/hu-HU/images/cheese-sandwich.png b/hu-HU/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/hu-HU/images/cheese-sandwich.png differ diff --git a/hu-HU/images/click-flag-annotated.png b/hu-HU/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/hu-HU/images/click-flag-annotated.png differ diff --git a/hu-HU/images/click-flag.png b/hu-HU/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/hu-HU/images/click-flag.png differ diff --git a/hu-HU/images/code-new-blocks.png b/hu-HU/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/hu-HU/images/code-new-blocks.png differ diff --git a/hu-HU/images/code-with-confidence.png b/hu-HU/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/hu-HU/images/code-with-confidence.png differ diff --git a/hu-HU/images/create-project-annotated.png b/hu-HU/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/hu-HU/images/create-project-annotated.png differ diff --git a/hu-HU/images/create-project.png b/hu-HU/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/hu-HU/images/create-project.png differ diff --git a/hu-HU/images/empty-buckets.png b/hu-HU/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/hu-HU/images/empty-buckets.png differ diff --git a/hu-HU/images/full-buckets.png b/hu-HU/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/hu-HU/images/full-buckets.png differ diff --git a/hu-HU/images/open-scratch-3-annotated.png b/hu-HU/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/hu-HU/images/open-scratch-3-annotated.png differ diff --git a/hu-HU/images/open-scratch-3.png b/hu-HU/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/hu-HU/images/open-scratch-3.png differ diff --git a/hu-HU/images/project-make-annotated.png b/hu-HU/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/hu-HU/images/project-make-annotated.png differ diff --git 
a/hu-HU/images/project-make.png b/hu-HU/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/hu-HU/images/project-make.png differ diff --git a/hu-HU/images/project-templates-annotated.png b/hu-HU/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/hu-HU/images/project-templates-annotated.png differ diff --git a/hu-HU/images/project-templates.png b/hu-HU/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and b/hu-HU/images/project-templates.png differ diff --git a/hu-HU/images/project-train-annotated.png b/hu-HU/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/hu-HU/images/project-train-annotated.png differ diff --git a/hu-HU/images/projects-list-annotated.png b/hu-HU/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/hu-HU/images/projects-list-annotated.png differ diff --git a/hu-HU/images/projects-list.png b/hu-HU/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/hu-HU/images/projects-list.png differ diff --git a/hu-HU/images/scratch-template-annotated.png b/hu-HU/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/hu-HU/images/scratch-template-annotated.png differ diff --git a/hu-HU/images/scratch-template.png b/hu-HU/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/hu-HU/images/scratch-template.png differ diff --git a/hu-HU/images/smart-classroom.gif b/hu-HU/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/hu-HU/images/smart-classroom.gif differ diff --git a/hu-HU/images/test-new-model-annotated.png b/hu-HU/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/hu-HU/images/test-new-model-annotated.png differ diff --git a/hu-HU/images/test-with-new-blocks-annotated.png b/hu-HU/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/hu-HU/images/test-with-new-blocks-annotated.png differ diff --git a/hu-HU/images/test-with-new-blocks.png b/hu-HU/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/hu-HU/images/test-with-new-blocks.png differ diff --git a/hu-HU/images/train-new-model-annotated.png b/hu-HU/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/hu-HU/images/train-new-model-annotated.png differ diff --git a/hu-HU/images/train-new-model.png b/hu-HU/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/hu-HU/images/train-new-model.png differ diff --git a/hu-HU/images/what-you-will-make.png b/hu-HU/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/hu-HU/images/what-you-will-make.png differ diff --git a/hu-HU/meta.yml b/hu-HU/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/hu-HU/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a 
project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? diff --git a/hu-HU/step_1.md b/hu-HU/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/hu-HU/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/hu-HU/step_2.md b/hu-HU/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/hu-HU/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. 
![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. + +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/hu-HU/step_3.md b/hu-HU/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/hu-HU/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/hu-HU/step_4.md b/hu-HU/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/hu-HU/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. 
+ ++ Do the same for the “lamp on” and “lamp off” buckets. + +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/hu-HU/step_5.md b/hu-HU/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/hu-HU/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
\ No newline at end of file diff --git a/hu-HU/step_6.md b/hu-HU/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/hu-HU/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/hu-HU/step_7.md b/hu-HU/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/hu-HU/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/hu-HU/step_8.md b/hu-HU/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/hu-HU/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/hu-HU/step_9.md b/hu-HU/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/hu-HU/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/it-IT/images/banner.png b/it-IT/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/it-IT/images/banner.png differ diff --git a/it-IT/images/cheese-sandwich-annotated.png b/it-IT/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/it-IT/images/cheese-sandwich-annotated.png differ diff --git a/it-IT/images/cheese-sandwich.png b/it-IT/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/it-IT/images/cheese-sandwich.png differ diff --git a/it-IT/images/click-flag-annotated.png b/it-IT/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/it-IT/images/click-flag-annotated.png differ diff --git a/it-IT/images/click-flag.png b/it-IT/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/it-IT/images/click-flag.png differ diff --git a/it-IT/images/code-new-blocks.png b/it-IT/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/it-IT/images/code-new-blocks.png differ diff --git a/it-IT/images/code-with-confidence.png b/it-IT/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/it-IT/images/code-with-confidence.png differ diff --git a/it-IT/images/create-project-annotated.png b/it-IT/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/it-IT/images/create-project-annotated.png differ diff --git a/it-IT/images/create-project.png b/it-IT/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/it-IT/images/create-project.png differ diff --git a/it-IT/images/empty-buckets.png b/it-IT/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/it-IT/images/empty-buckets.png differ diff --git a/it-IT/images/full-buckets.png b/it-IT/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/it-IT/images/full-buckets.png differ diff --git a/it-IT/images/open-scratch-3-annotated.png b/it-IT/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/it-IT/images/open-scratch-3-annotated.png differ diff --git a/it-IT/images/open-scratch-3.png b/it-IT/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/it-IT/images/open-scratch-3.png differ diff --git a/it-IT/images/project-make-annotated.png b/it-IT/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/it-IT/images/project-make-annotated.png differ diff --git a/it-IT/images/project-make.png b/it-IT/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/it-IT/images/project-make.png differ diff --git a/it-IT/images/project-templates-annotated.png b/it-IT/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/it-IT/images/project-templates-annotated.png differ diff --git a/it-IT/images/project-templates.png b/it-IT/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/it-IT/images/project-templates.png differ diff --git a/it-IT/images/project-train-annotated.png b/it-IT/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/it-IT/images/project-train-annotated.png differ diff --git a/it-IT/images/projects-list-annotated.png b/it-IT/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/it-IT/images/projects-list-annotated.png differ diff --git a/it-IT/images/projects-list.png b/it-IT/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/it-IT/images/projects-list.png differ diff --git a/it-IT/images/scratch-template-annotated.png b/it-IT/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/it-IT/images/scratch-template-annotated.png differ diff --git a/it-IT/images/scratch-template.png b/it-IT/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/it-IT/images/scratch-template.png differ diff --git a/it-IT/images/smart-classroom.gif b/it-IT/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/it-IT/images/smart-classroom.gif differ diff --git a/it-IT/images/test-new-model-annotated.png b/it-IT/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/it-IT/images/test-new-model-annotated.png differ diff --git a/it-IT/images/test-with-new-blocks-annotated.png b/it-IT/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/it-IT/images/test-with-new-blocks-annotated.png differ diff --git a/it-IT/images/test-with-new-blocks.png b/it-IT/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/it-IT/images/test-with-new-blocks.png differ diff --git a/it-IT/images/train-new-model-annotated.png b/it-IT/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/it-IT/images/train-new-model-annotated.png differ diff --git a/it-IT/images/train-new-model.png b/it-IT/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/it-IT/images/train-new-model.png differ diff --git a/it-IT/images/what-you-will-make.png b/it-IT/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/it-IT/images/what-you-will-make.png differ diff --git a/it-IT/meta.yml b/it-IT/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/it-IT/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/it-IT/step_1.md b/it-IT/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/it-IT/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/it-IT/step_2.md b/it-IT/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/it-IT/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/it-IT/step_3.md b/it-IT/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/it-IT/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/it-IT/step_4.md b/it-IT/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/it-IT/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/it-IT/step_5.md b/it-IT/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/it-IT/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
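+
+Here is a rough sketch of the kind of script you will build over the next two steps, so you can see where your new model fits in. It uses the `recognise text … (label)` block that your project adds to Scratch (described in the next step), and it assumes a matching `recognise text … (confidence)` block, a 70% confidence threshold, and labels named to match your buckets (for example “fan on”), so treat it as a preview rather than exact code to copy:
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (confidence)) > (70)> then
+if <(recognise text (answer) (label)) = [fan on]> then
+broadcast (turn-fan-on v)
+end
+if <(recognise text (answer) (label)) = [fan off]> then
+broadcast (turn-fan-off v)
+end
+if <(recognise text (answer) (label)) = [lamp on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [lamp off]> then
+broadcast (turn-lamp-off v)
+end
+else
+say [Sorry I’m not sure what you mean]
+end
+end
+```
+
+If the confidence is below the threshold, the assistant says it is not sure instead of guessing, which is the behaviour you will add in the ‘How to use confidence scores’ step.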
\ No newline at end of file diff --git a/it-IT/step_6.md b/it-IT/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/it-IT/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/it-IT/step_7.md b/it-IT/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/it-IT/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/it-IT/step_8.md b/it-IT/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/it-IT/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/it-IT/step_9.md b/it-IT/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/it-IT/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/ja-JP/images/banner.png b/ja-JP/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/ja-JP/images/banner.png differ diff --git a/ja-JP/images/cheese-sandwich-annotated.png b/ja-JP/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/ja-JP/images/cheese-sandwich-annotated.png differ diff --git a/ja-JP/images/cheese-sandwich.png b/ja-JP/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/ja-JP/images/cheese-sandwich.png differ diff --git a/ja-JP/images/click-flag-annotated.png b/ja-JP/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/ja-JP/images/click-flag-annotated.png differ diff --git a/ja-JP/images/click-flag.png b/ja-JP/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/ja-JP/images/click-flag.png differ diff --git a/ja-JP/images/code-new-blocks.png b/ja-JP/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/ja-JP/images/code-new-blocks.png differ diff --git a/ja-JP/images/code-with-confidence.png b/ja-JP/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/ja-JP/images/code-with-confidence.png differ diff --git a/ja-JP/images/create-project-annotated.png b/ja-JP/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/ja-JP/images/create-project-annotated.png differ diff --git a/ja-JP/images/create-project.png b/ja-JP/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/ja-JP/images/create-project.png differ diff --git a/ja-JP/images/empty-buckets.png b/ja-JP/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/ja-JP/images/empty-buckets.png differ diff --git a/ja-JP/images/full-buckets.png b/ja-JP/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/ja-JP/images/full-buckets.png differ diff --git a/ja-JP/images/open-scratch-3-annotated.png b/ja-JP/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/ja-JP/images/open-scratch-3-annotated.png differ diff --git a/ja-JP/images/open-scratch-3.png b/ja-JP/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/ja-JP/images/open-scratch-3.png differ diff --git a/ja-JP/images/project-make-annotated.png b/ja-JP/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/ja-JP/images/project-make-annotated.png differ diff --git a/ja-JP/images/project-make.png b/ja-JP/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/ja-JP/images/project-make.png differ diff --git a/ja-JP/images/project-templates-annotated.png b/ja-JP/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/ja-JP/images/project-templates-annotated.png differ diff --git a/ja-JP/images/project-templates.png b/ja-JP/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/ja-JP/images/project-templates.png differ diff --git a/ja-JP/images/project-train-annotated.png b/ja-JP/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/ja-JP/images/project-train-annotated.png differ diff --git a/ja-JP/images/projects-list-annotated.png b/ja-JP/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/ja-JP/images/projects-list-annotated.png differ diff --git a/ja-JP/images/projects-list.png b/ja-JP/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/ja-JP/images/projects-list.png differ diff --git a/ja-JP/images/scratch-template-annotated.png b/ja-JP/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/ja-JP/images/scratch-template-annotated.png differ diff --git a/ja-JP/images/scratch-template.png b/ja-JP/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/ja-JP/images/scratch-template.png differ diff --git a/ja-JP/images/smart-classroom.gif b/ja-JP/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/ja-JP/images/smart-classroom.gif differ diff --git a/ja-JP/images/test-new-model-annotated.png b/ja-JP/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/ja-JP/images/test-new-model-annotated.png differ diff --git a/ja-JP/images/test-with-new-blocks-annotated.png b/ja-JP/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/ja-JP/images/test-with-new-blocks-annotated.png differ diff --git a/ja-JP/images/test-with-new-blocks.png b/ja-JP/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/ja-JP/images/test-with-new-blocks.png differ diff --git a/ja-JP/images/train-new-model-annotated.png b/ja-JP/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/ja-JP/images/train-new-model-annotated.png differ diff --git a/ja-JP/images/train-new-model.png b/ja-JP/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/ja-JP/images/train-new-model.png differ diff --git a/ja-JP/images/what-you-will-make.png b/ja-JP/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/ja-JP/images/what-you-will-make.png differ diff --git a/ja-JP/meta.yml b/ja-JP/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/ja-JP/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/ja-JP/step_1.md b/ja-JP/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/ja-JP/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/ja-JP/step_2.md b/ja-JP/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/ja-JP/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/ja-JP/step_3.md b/ja-JP/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/ja-JP/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/ja-JP/step_4.md b/ja-JP/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/ja-JP/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/ja-JP/step_5.md b/ja-JP/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/ja-JP/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
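+
+Here is a rough sketch of the kind of script you will build over the next two steps, so you can see where your new model fits in. It uses the `recognise text … (label)` block that your project adds to Scratch (described in the next step), and it assumes a matching `recognise text … (confidence)` block, a 70% confidence threshold, and labels named to match your buckets (for example “fan on”), so treat it as a preview rather than exact code to copy:
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (confidence)) > (70)> then
+if <(recognise text (answer) (label)) = [fan on]> then
+broadcast (turn-fan-on v)
+end
+if <(recognise text (answer) (label)) = [fan off]> then
+broadcast (turn-fan-off v)
+end
+if <(recognise text (answer) (label)) = [lamp on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [lamp off]> then
+broadcast (turn-lamp-off v)
+end
+else
+say [Sorry I’m not sure what you mean]
+end
+end
+```
+
+If the confidence is below the threshold, the assistant says it is not sure instead of guessing, which is the behaviour you will add in the ‘How to use confidence scores’ step.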
\ No newline at end of file diff --git a/ja-JP/step_6.md b/ja-JP/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/ja-JP/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/ja-JP/step_7.md b/ja-JP/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/ja-JP/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/ja-JP/step_8.md b/ja-JP/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/ja-JP/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/ja-JP/step_9.md b/ja-JP/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/ja-JP/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/ko-KR/images/banner.png b/ko-KR/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/ko-KR/images/banner.png differ diff --git a/ko-KR/images/cheese-sandwich-annotated.png b/ko-KR/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/ko-KR/images/cheese-sandwich-annotated.png differ diff --git a/ko-KR/images/cheese-sandwich.png b/ko-KR/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/ko-KR/images/cheese-sandwich.png differ diff --git a/ko-KR/images/click-flag-annotated.png b/ko-KR/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/ko-KR/images/click-flag-annotated.png differ diff --git a/ko-KR/images/click-flag.png b/ko-KR/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/ko-KR/images/click-flag.png differ diff --git a/ko-KR/images/code-new-blocks.png b/ko-KR/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/ko-KR/images/code-new-blocks.png differ diff --git a/ko-KR/images/code-with-confidence.png b/ko-KR/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/ko-KR/images/code-with-confidence.png differ diff --git a/ko-KR/images/create-project-annotated.png b/ko-KR/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/ko-KR/images/create-project-annotated.png differ diff --git a/ko-KR/images/create-project.png b/ko-KR/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/ko-KR/images/create-project.png differ diff --git a/ko-KR/images/empty-buckets.png b/ko-KR/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/ko-KR/images/empty-buckets.png differ diff --git a/ko-KR/images/full-buckets.png b/ko-KR/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/ko-KR/images/full-buckets.png differ diff --git a/ko-KR/images/open-scratch-3-annotated.png b/ko-KR/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/ko-KR/images/open-scratch-3-annotated.png differ diff --git a/ko-KR/images/open-scratch-3.png b/ko-KR/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/ko-KR/images/open-scratch-3.png differ diff --git a/ko-KR/images/project-make-annotated.png b/ko-KR/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/ko-KR/images/project-make-annotated.png differ diff --git a/ko-KR/images/project-make.png b/ko-KR/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/ko-KR/images/project-make.png differ diff --git a/ko-KR/images/project-templates-annotated.png b/ko-KR/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/ko-KR/images/project-templates-annotated.png differ diff --git a/ko-KR/images/project-templates.png b/ko-KR/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/ko-KR/images/project-templates.png differ diff --git a/ko-KR/images/project-train-annotated.png b/ko-KR/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/ko-KR/images/project-train-annotated.png differ diff --git a/ko-KR/images/projects-list-annotated.png b/ko-KR/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/ko-KR/images/projects-list-annotated.png differ diff --git a/ko-KR/images/projects-list.png b/ko-KR/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/ko-KR/images/projects-list.png differ diff --git a/ko-KR/images/scratch-template-annotated.png b/ko-KR/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/ko-KR/images/scratch-template-annotated.png differ diff --git a/ko-KR/images/scratch-template.png b/ko-KR/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/ko-KR/images/scratch-template.png differ diff --git a/ko-KR/images/smart-classroom.gif b/ko-KR/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/ko-KR/images/smart-classroom.gif differ diff --git a/ko-KR/images/test-new-model-annotated.png b/ko-KR/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/ko-KR/images/test-new-model-annotated.png differ diff --git a/ko-KR/images/test-with-new-blocks-annotated.png b/ko-KR/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/ko-KR/images/test-with-new-blocks-annotated.png differ diff --git a/ko-KR/images/test-with-new-blocks.png b/ko-KR/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/ko-KR/images/test-with-new-blocks.png differ diff --git a/ko-KR/images/train-new-model-annotated.png b/ko-KR/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/ko-KR/images/train-new-model-annotated.png differ diff --git a/ko-KR/images/train-new-model.png b/ko-KR/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/ko-KR/images/train-new-model.png differ diff --git a/ko-KR/images/what-you-will-make.png b/ko-KR/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/ko-KR/images/what-you-will-make.png differ diff --git a/ko-KR/meta.yml b/ko-KR/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/ko-KR/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/ko-KR/step_1.md b/ko-KR/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/ko-KR/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/ko-KR/step_2.md b/ko-KR/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/ko-KR/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/ko-KR/step_3.md b/ko-KR/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/ko-KR/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/ko-KR/step_4.md b/ko-KR/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/ko-KR/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/ko-KR/step_5.md b/ko-KR/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/ko-KR/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
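+
+To see what this change means for your Scratch code, here is a rough sketch of how the earlier rules script could look once a model is trained: instead of comparing the answer against exact phrases, the script asks the model for a label and reacts to that. The block wording and the label names are approximate (the screenshots show the labels as fan_on, fan_off, lamp_on and lamp_off); use the `recognise text` block and the bucket names from your own project. The next step walks through this properly.
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (label)) = [fan_on]> then
+broadcast (turn-fan-on v)
+end
+if <(recognise text (answer) (label)) = [fan_off]> then
+broadcast (turn-fan-off v)
+end
+if <(recognise text (answer) (label)) = [lamp_on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [lamp_off]> then
+broadcast (turn-lamp-off v)
+end
+end
+```
+
+One check per label replaces the need to predict every possible phrasing of a command.
+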
\ No newline at end of file diff --git a/ko-KR/step_6.md b/ko-KR/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/ko-KR/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/ko-KR/step_7.md b/ko-KR/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/ko-KR/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/ko-KR/step_8.md b/ko-KR/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/ko-KR/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/ko-KR/step_9.md b/ko-KR/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/ko-KR/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/nl-NL/images/banner.png b/nl-NL/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/nl-NL/images/banner.png differ diff --git a/nl-NL/images/cheese-sandwich-annotated.png b/nl-NL/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..85c123b Binary files /dev/null and b/nl-NL/images/cheese-sandwich-annotated.png differ diff --git a/nl-NL/images/cheese-sandwich.png b/nl-NL/images/cheese-sandwich.png new file mode 100644 index 0000000..a7e46bd Binary files /dev/null and b/nl-NL/images/cheese-sandwich.png differ diff --git a/nl-NL/images/classroom-devices.png b/nl-NL/images/classroom-devices.png new file mode 100644 index 0000000..a25ed48 Binary files /dev/null and b/nl-NL/images/classroom-devices.png differ diff --git a/nl-NL/images/click-flag-annotated.png b/nl-NL/images/click-flag-annotated.png new file mode 100644 index 0000000..59a85cb Binary files /dev/null and b/nl-NL/images/click-flag-annotated.png differ diff --git a/nl-NL/images/click-flag.png b/nl-NL/images/click-flag.png new file mode 100644 index 0000000..aa3ae65 Binary files /dev/null and b/nl-NL/images/click-flag.png differ diff --git a/nl-NL/images/code-new-blocks.png b/nl-NL/images/code-new-blocks.png new file mode 100644 index 0000000..89ab64f Binary files /dev/null and b/nl-NL/images/code-new-blocks.png differ diff --git a/nl-NL/images/code-with-confidence.png b/nl-NL/images/code-with-confidence.png new file mode 100644 index 0000000..ad09db3 Binary files /dev/null and b/nl-NL/images/code-with-confidence.png differ diff --git a/nl-NL/images/create-project-annotated.png b/nl-NL/images/create-project-annotated.png new file mode 100644 index 0000000..465dce2 Binary files /dev/null and b/nl-NL/images/create-project-annotated.png differ diff --git a/nl-NL/images/create-project.png b/nl-NL/images/create-project.png new file mode 100644 index 0000000..24ca81f Binary files /dev/null and b/nl-NL/images/create-project.png differ diff --git a/nl-NL/images/empty-buckets.png b/nl-NL/images/empty-buckets.png new file mode 100644 index 0000000..cc665dc Binary files /dev/null and b/nl-NL/images/empty-buckets.png differ diff --git a/nl-NL/images/fan-on-and-off.png b/nl-NL/images/fan-on-and-off.png new file mode 100644 index 0000000..76beb24 Binary files /dev/null and b/nl-NL/images/fan-on-and-off.png differ diff --git a/nl-NL/images/full-buckets.png b/nl-NL/images/full-buckets.png new file mode 100644 index 0000000..91b76d5 Binary files /dev/null and b/nl-NL/images/full-buckets.png differ diff --git a/nl-NL/images/new-blocks-menu.png b/nl-NL/images/new-blocks-menu.png new file mode 100644 index 0000000..b1d716a Binary files /dev/null and b/nl-NL/images/new-blocks-menu.png differ diff --git a/nl-NL/images/open-scratch-3-annotated.png b/nl-NL/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..8c26af7 Binary files /dev/null and b/nl-NL/images/open-scratch-3-annotated.png differ diff --git a/nl-NL/images/open-scratch-3.png b/nl-NL/images/open-scratch-3.png new file mode 100644 index 0000000..3119a4d Binary files /dev/null and b/nl-NL/images/open-scratch-3.png differ diff --git a/nl-NL/images/play-music.png b/nl-NL/images/play-music.png new file mode 100644 index 0000000..852902d Binary files /dev/null and b/nl-NL/images/play-music.png differ diff --git 
a/nl-NL/images/project-make-annotated.png b/nl-NL/images/project-make-annotated.png new file mode 100644 index 0000000..0043701 Binary files /dev/null and b/nl-NL/images/project-make-annotated.png differ diff --git a/nl-NL/images/project-make.png b/nl-NL/images/project-make.png new file mode 100644 index 0000000..50e9264 Binary files /dev/null and b/nl-NL/images/project-make.png differ diff --git a/nl-NL/images/project-templates-annotated.png b/nl-NL/images/project-templates-annotated.png new file mode 100644 index 0000000..7f1147b Binary files /dev/null and b/nl-NL/images/project-templates-annotated.png differ diff --git a/nl-NL/images/project-templates.png b/nl-NL/images/project-templates.png new file mode 100644 index 0000000..564f5b9 Binary files /dev/null and b/nl-NL/images/project-templates.png differ diff --git a/nl-NL/images/project-train-annotated.png b/nl-NL/images/project-train-annotated.png new file mode 100644 index 0000000..fc9ca40 Binary files /dev/null and b/nl-NL/images/project-train-annotated.png differ diff --git a/nl-NL/images/project-train.png b/nl-NL/images/project-train.png new file mode 100644 index 0000000..3a3bdb9 Binary files /dev/null and b/nl-NL/images/project-train.png differ diff --git a/nl-NL/images/projects-list-annotated.png b/nl-NL/images/projects-list-annotated.png new file mode 100644 index 0000000..449c177 Binary files /dev/null and b/nl-NL/images/projects-list-annotated.png differ diff --git a/nl-NL/images/projects-list.png b/nl-NL/images/projects-list.png new file mode 100644 index 0000000..c085208 Binary files /dev/null and b/nl-NL/images/projects-list.png differ diff --git a/nl-NL/images/save-to-computer.png b/nl-NL/images/save-to-computer.png new file mode 100644 index 0000000..3d67093 Binary files /dev/null and b/nl-NL/images/save-to-computer.png differ diff --git a/nl-NL/images/scratch-template-annotated.png b/nl-NL/images/scratch-template-annotated.png new file mode 100644 index 0000000..bf706d2 Binary files /dev/null and b/nl-NL/images/scratch-template-annotated.png differ diff --git a/nl-NL/images/scratch-template.png b/nl-NL/images/scratch-template.png new file mode 100644 index 0000000..fab906a Binary files /dev/null and b/nl-NL/images/scratch-template.png differ diff --git a/nl-NL/images/smart-classroom.gif b/nl-NL/images/smart-classroom.gif new file mode 100644 index 0000000..1c62c1d Binary files /dev/null and b/nl-NL/images/smart-classroom.gif differ diff --git a/nl-NL/images/smart-classroom.png b/nl-NL/images/smart-classroom.png new file mode 100644 index 0000000..e166aaa Binary files /dev/null and b/nl-NL/images/smart-classroom.png differ diff --git a/nl-NL/images/test-model.png b/nl-NL/images/test-model.png new file mode 100644 index 0000000..7dad5b5 Binary files /dev/null and b/nl-NL/images/test-model.png differ diff --git a/nl-NL/images/test-new-model-annotated.png b/nl-NL/images/test-new-model-annotated.png new file mode 100644 index 0000000..900719e Binary files /dev/null and b/nl-NL/images/test-new-model-annotated.png differ diff --git a/nl-NL/images/test-with-new-blocks-annotated.png b/nl-NL/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..544fc9d Binary files /dev/null and b/nl-NL/images/test-with-new-blocks-annotated.png differ diff --git a/nl-NL/images/test-with-new-blocks.png b/nl-NL/images/test-with-new-blocks.png new file mode 100644 index 0000000..36ed3ee Binary files /dev/null and b/nl-NL/images/test-with-new-blocks.png differ diff --git a/nl-NL/images/train-new-model-annotated.png 
b/nl-NL/images/train-new-model-annotated.png new file mode 100644 index 0000000..aa60f87 Binary files /dev/null and b/nl-NL/images/train-new-model-annotated.png differ diff --git a/nl-NL/images/train-new-model.png b/nl-NL/images/train-new-model.png new file mode 100644 index 0000000..cf911a1 Binary files /dev/null and b/nl-NL/images/train-new-model.png differ diff --git a/nl-NL/images/turn-fan-off.png b/nl-NL/images/turn-fan-off.png new file mode 100644 index 0000000..00280bb Binary files /dev/null and b/nl-NL/images/turn-fan-off.png differ diff --git a/nl-NL/images/turn-fan-on.png b/nl-NL/images/turn-fan-on.png new file mode 100644 index 0000000..86d49d4 Binary files /dev/null and b/nl-NL/images/turn-fan-on.png differ diff --git a/nl-NL/images/what-you-will-make.png b/nl-NL/images/what-you-will-make.png new file mode 100644 index 0000000..a2268da Binary files /dev/null and b/nl-NL/images/what-you-will-make.png differ diff --git a/nl-NL/images/whatyouwillmake.gif b/nl-NL/images/whatyouwillmake.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/nl-NL/images/whatyouwillmake.gif differ diff --git a/nl-NL/meta.yml b/nl-NL/meta.yml new file mode 100644 index 0000000..6236b8c --- /dev/null +++ b/nl-NL/meta.yml @@ -0,0 +1,24 @@ +--- +title: Slimme klassenassistent +hero_image: images/banner.png +description: Maak een virtuele klassenassistent die op opdrachten reageert +version: 4 +listed: true +copyedit: true +last_tested: '2019-05-09' +steps: +- title: Inleiding +- title: Hoe maak je een project +- title: Voeg een lijst met regels toe + completion: + - engaged +- title: Verzamel voorbeelden voor training +- title: Train en test een machine learning-model +- title: 'Gebruik het machine learning-model in Scratch' + challenge: true +- title: Hoe betrouwbaarheidsscores te gebruiken + completion: + - internal +- title: 'Uitdaging: meer items om te controleren' + completion: + - external diff --git a/nl-NL/resources/NEW smart assistant 1.mp4 b/nl-NL/resources/NEW smart assistant 1.mp4 new file mode 100644 index 0000000..51c457d Binary files /dev/null and b/nl-NL/resources/NEW smart assistant 1.mp4 differ diff --git a/nl-NL/resources/NEW smart assistant 1.srt b/nl-NL/resources/NEW smart assistant 1.srt new file mode 100644 index 0000000..25e9bad --- /dev/null +++ b/nl-NL/resources/NEW smart assistant 1.srt @@ -0,0 +1,26 @@ +1 +00:00:04,040 --> 00:00:09,880 +Go to rpf.io/ML4K and get started. + +2 +00:00:09,880 --> 00:00:12,640 +Click 'Try it now'. + +3 +00:00:12,640 --> 00:00:16,960 +Add a new project called 'Smart assistant'. + +4 +00:00:16,960 --> 00:00:23,240 +Set it to recognise English text, and  +store the data in your web browser. + +5 +00:00:23,240 --> 00:00:29,000 +Create the project, then  +click on the project's name. + +6 +00:00:29,000 --> 00:00:29,960 +Now, click 'Train'. + diff --git a/nl-NL/resources/NEW smart assistant 2.mp4 b/nl-NL/resources/NEW smart assistant 2.mp4 new file mode 100644 index 0000000..2e90362 Binary files /dev/null and b/nl-NL/resources/NEW smart assistant 2.mp4 differ diff --git a/nl-NL/resources/NEW smart assistant 2.srt b/nl-NL/resources/NEW smart assistant 2.srt new file mode 100644 index 0000000..aec96dd --- /dev/null +++ b/nl-NL/resources/NEW smart assistant 2.srt @@ -0,0 +1,27 @@ +1 +00:00:04,320 --> 00:00:06,080 +Add two new labels + +2 +00:00:06,080 --> 00:00:08,400 +one for fan on, + +3 +00:00:08,400 --> 00:00:12,760 +and one for fan off. 
+ +4 +00:00:12,760 --> 00:00:18,200 +Type an example of a command  +that should turn the fan on. + +5 +00:00:18,200 --> 00:00:23,240 +Then repeat until you have  +eight different examples. + +6 +00:00:23,240 --> 00:00:29,960 +Now you need eight different examples  +of commands to turn the fan off too. + diff --git a/nl-NL/resources/NEW smart assistant 3.mp4 b/nl-NL/resources/NEW smart assistant 3.mp4 new file mode 100644 index 0000000..2fdde5f Binary files /dev/null and b/nl-NL/resources/NEW smart assistant 3.mp4 differ diff --git a/nl-NL/resources/NEW smart assistant 3.srt b/nl-NL/resources/NEW smart assistant 3.srt new file mode 100644 index 0000000..9870fbf --- /dev/null +++ b/nl-NL/resources/NEW smart assistant 3.srt @@ -0,0 +1,19 @@ +1 +00:00:03,600 --> 00:00:10,200 +Click 'Back', then click 'Learn & Test'. + +2 +00:00:10,200 --> 00:00:14,720 +Click the button to start training  +your model - it might take a while. + +3 +00:00:14,720 --> 00:00:18,120 +When it is done, type in a  +command to test the model. + +4 +00:00:18,120 --> 00:00:29,960 +Check that your command gives  +the result you were expecting. + diff --git a/nl-NL/resources/NEW smart assistant 4.mp4 b/nl-NL/resources/NEW smart assistant 4.mp4 new file mode 100644 index 0000000..a295711 Binary files /dev/null and b/nl-NL/resources/NEW smart assistant 4.mp4 differ diff --git a/nl-NL/resources/NEW smart assistant 4.srt b/nl-NL/resources/NEW smart assistant 4.srt new file mode 100644 index 0000000..a68f19a --- /dev/null +++ b/nl-NL/resources/NEW smart assistant 4.srt @@ -0,0 +1,28 @@ +1 +00:00:03,800 --> 00:00:12,600 +Now click 'Back' again, then click  +'Make' to use the model in Scratch. + +2 +00:00:12,600 --> 00:00:20,160 +Go to 'Project templates' and select  +the 'Smart Classroom' template. + +3 +00:00:20,160 --> 00:00:29,960 +Now, drag the code blocks to ask for  +a command and turn the fan on or off. + +4 +00:00:38,160 --> 00:00:48,800 +You can right click and select  +'Duplicate' to save time. + +5 +00:00:48,800 --> 00:00:54,240 +Now type in a command to control the fan. + +6 +00:00:54,240 --> 00:00:59,920 +Type another command to turn it off. + diff --git a/nl-NL/resources/NEW smart assistant 5.mp4 b/nl-NL/resources/NEW smart assistant 5.mp4 new file mode 100644 index 0000000..945ac64 Binary files /dev/null and b/nl-NL/resources/NEW smart assistant 5.mp4 differ diff --git a/nl-NL/resources/NEW smart assistant 5.srt b/nl-NL/resources/NEW smart assistant 5.srt new file mode 100644 index 0000000..f1c1642 --- /dev/null +++ b/nl-NL/resources/NEW smart assistant 5.srt @@ -0,0 +1,33 @@ +1 +00:00:04,120 --> 00:00:07,480 +Go back to the 'Learn & Test' page. + +2 +00:00:07,480 --> 00:00:11,440 +Type something that is not  +related to lamps or fans. + +3 +00:00:11,440 --> 00:00:15,240 +The model should show a low  +confidence about its prediction. + +4 +00:00:15,240 --> 00:00:19,520 +Go back to Scratch, and move all of  +your 'if' statements to one side. + +5 +00:00:19,520 --> 00:00:26,880 +Add an 'if/else' so that your code can  +check the model's confidence level. + +6 +00:00:26,880 --> 00:00:36,640 +The light or fan should only be changed  +if the confidence level is over 70. + +7 +00:00:36,640 --> 00:00:46,640 +Try it out! 
+ diff --git a/nl-NL/resources/readme.txt b/nl-NL/resources/readme.txt new file mode 100644 index 0000000..f5f90aa --- /dev/null +++ b/nl-NL/resources/readme.txt @@ -0,0 +1 @@ +Om een video met ondertiteling te bekijken op VLC (videolan.org), zorg ervoor dat het videobestand en het ondertitelingsbestand in dezelfde map zitten en exact dezelfde naam hebben (bijv. video.mp4 en video.srt). Open de video in VLC, dan wordt de ondertiteling automatisch geladen. Als de ondertiteling niet verschijnt, klik dan met de rechtermuisknop op het videoscherm, ga naar **Subtitle**, dan **Add Subtitle File**, en selecteer het juiste .srt-bestand. Veel kijkplezier met de ondertiteling! \ No newline at end of file diff --git a/nl-NL/step_1.md b/nl-NL/step_1.md new file mode 100644 index 0000000..4ffa1da --- /dev/null +++ b/nl-NL/step_1.md @@ -0,0 +1,33 @@ +## Wat ga je maken + +Create a smart virtual assistant that reacts to your commands. + +![A Scratch project with a fan and a light and a box to type in](images/whatyouwillmake.gif) + +\--- collapse --- +--- +title: Where are my commands stored? +--- + +- This project uses a technology called 'machine learning'. Machine learning systems are trained using a large amount of data. +- This project does not require you to create an account or log in. For this project, the examples you use to make the model are only stored temporarily in your browser (only on your machine). + +\--- /collapse --- + +\--- collapse --- +--- +title: No YouTube? Download the videos! +--- + +You can [download all the videos for this project](https://rpf.io/p/en/smart-assistant-go){:target="_blank"}. + +\--- /collapse --- + +\--- collapse --- +--- +title: Licentie +--- + +Dit project heeft een dubbele licentie onder zowel een [Creative Commons Attribution Non-Commercial Share-Alike-licentie](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} en een [Apache-licentie versie 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We willen Dale van machinelearningforkids.co.uk graag bedanken voor al zijn werk aan dit project. + +\--- /collapse --- \ No newline at end of file diff --git a/nl-NL/step_2.md b/nl-NL/step_2.md new file mode 100644 index 0000000..b3314ec --- /dev/null +++ b/nl-NL/step_2.md @@ -0,0 +1,38 @@ +## Hoe maak je een project + +
+ +\--- task --- + +- Ga naar [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in een webbrowser. + +- Klik op **Begin**. + +- Klik op **Try it now**. + +\--- /task --- + +\--- task --- + +- Klik op **Projecten** in de menubalk bovenaan. + +- Klik op de knop **+ Voeg een nieuw project toe**. + +- Name your project `Smart assistant` and set it to learn to recognise **text**, and store data **in your web browser**. Klik vervolgens op **Creëer**. + ![Een project maken](images/create-project-annotated.png) + +- You should now see 'Smart assistant' in the projects list. Klik op dit project. + ![Project list with 'Smart assistant' listed](images/projects-list-annotated.png) + +\--- /task --- + +\--- task --- + +- Klik op **Maak**. + ![Project hoofdmenu](images/project-make-annotated.png) + +\--- /task --- diff --git a/nl-NL/step_3.md b/nl-NL/step_3.md new file mode 100644 index 0000000..d83c009 --- /dev/null +++ b/nl-NL/step_3.md @@ -0,0 +1,45 @@ +## Voeg een lijst met regels toe + +
+ +The room has two devices: a fan and a light. + +![A fan and a light](images/classroom-devices.png) + +Your assistant needs some examples of the phrases you might say when you want each device to be turned on or off. For example, to **turn the fan on**, you might say: + +- "Zet de ventilator aan" +- "Please can you switch on the fan" +- "I'm too hot" +- "It's hot in here" + +\--- task --- + +- Click on **+ Add new label** on the top right and add the label “fan on”. + +\--- /task --- + +\--- task --- + +- Click on **Add example** and type in `Turn on the fan`. + +\--- /task --- + +\--- task --- + +- Continue to click on the **Add example** button and add different ways of asking for the fan to be turned on until you have eight different examples. + +\--- /task --- + +\--- task --- + +- Click on **Add new label**, but this time create the label "fan off". Add eight different examples of ways you could ask for the fan to be turned off. + +\--- /task --- + +![Fan on and off categories with eight examples of commands in each](images/fan-on-and-off.png) diff --git a/nl-NL/step_4.md b/nl-NL/step_4.md new file mode 100644 index 0000000..5b661d5 --- /dev/null +++ b/nl-NL/step_4.md @@ -0,0 +1,40 @@ +## Verzamel voorbeelden voor training + +
+ + +Now that you have some example data, you can train the machine learning model to label a command as either 'fan on' or 'fan off' based on your examples. + +\--- task --- + +- Click on the **< Back to project** link, then click on **Learn & Test**. + +\--- /task --- + +\--- task --- + +- Click on the **Train new machine learning model** button. + +\--- /task --- + +Wait for the training to complete — this might take a minute or two. Once the training has completed, a test box appears. + +\--- task --- + +- Type in `put the fan on` and check that the model labels this input as 'fan on'. + ![Type in put the fan on to see if it is recognised](images/test-model.png) + +\--- /task --- + +\--- task --- + +- Try typing in some other commands for turning the fan on and off, and check that they are given the label you expect. + +\--- /task --- + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + + diff --git a/nl-NL/step_5.md b/nl-NL/step_5.md new file mode 100644 index 0000000..265420a --- /dev/null +++ b/nl-NL/step_5.md @@ -0,0 +1,59 @@ +## Train en test een machine learning-model + + +
+ + +Now that your model can distinguish between commands, you can use it in a Scratch program to create your smart assistant. + +\--- task --- + +- Click on the **< Back to project** link. + +- Click on **Make**. + +- Click on **Scratch 3**. + +- Click on **Open in Scratch 3**. + +\--- /task --- + +\--- task --- + +- Click on **Project templates** at the top and select the 'Smart classroom' project to load the fan and light sprites. This project also contains pre-made yellow `broadcast` blocks, which can be found under **Events**. + +![Smart classroom project is selected in the Scratch templates](images/smart-classroom.png) + +\--- /task --- + +Machine Learning for Kids has added some special blocks to Scratch to allow you to use the model you just trained. Find them at the bottom of the blocks list. + +![New 'smart assistant' blocks shown in the menu underneath Images](images/new-blocks-menu.png) + +\--- task --- + +- Make sure you have the **Classroom** sprite selected, then click on the **Code** tab and add this code: + +![New scratch code: when flag clicked, forever, ask 'enter your command' and wait. If recognise text (answer) label = fan on, then broadcast turn-fan-on ](images/turn-fan-on.png) + +\--- /task --- + +\--- task --- + +- Right click on the `if` block and select **Duplicate** to add a copy of the whole block of code, and put it directly underneath the first `if`. + +- Change the second copy of the block so that it recognises the text for turning the fan **off**, and broadcasts **turn-fan-off**. + +![New scratch code: If recognise text (answer) label = fan off, then broadcast turn-fan-off](images/turn-fan-off.png) + +\--- /task --- + +\--- task --- + +- Click the **green flag** and type in a command to turn the fan on or off. Check that it has the result you expected. + +- Make sure you test that the assistant performs the correct action **even for commands that you didn’t include as examples**. + +\--- /task --- diff --git a/nl-NL/step_6.md b/nl-NL/step_6.md new file mode 100644 index 0000000..07769b6 --- /dev/null +++ b/nl-NL/step_6.md @@ -0,0 +1,65 @@ +## Gebruik het machine learning-model in Scratch + +\--- challenge --- + +Follow the same steps as before to allow the assistant to also control the light. + +\--- task --- + +- Save a copy of your Scratch project to your computer so that you can easily reload it later to use with your new model. + +![Click on File and then Save to computer](images/save-to-computer.png) + +\--- /task --- + +\--- task --- + +- Go back to your model (**Back to project** > **Train**) and add two more labels: `light_on` and `light_off`. + +\--- /task --- + +\--- task --- + +- Add eight examples of commands you might use to turn the light on. + +\--- /task --- + +\--- task --- + +- Add eight examples of commands you might use to turn the light off. + +\--- /task --- + +\--- task --- + +- Re-train your model (**Back to project** > **Learn and Test**) so that it can also recognise commands for turning the light on and off. + +\--- /task --- + +\--- task --- + +- Load your new model into Scratch (**Make** > **Scratch 3** > **Open in Scratch 3**). + +- In Scratch, reload the code you saved earlier (**File** > **Load from my computer**). + +- Add two more `if` blocks to your program so that you can type commands to control the light. 
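+
+As a rough sketch, the two extra `if` blocks could look like this. The labels `light_on` and `light_off` are the ones you added earlier; the broadcast names `turn-lamp-on` and `turn-lamp-off` are an assumption based on the template's fan broadcasts, so check the exact names of the pre-made yellow `broadcast` blocks under **Events** in your own project:
+
+```blocks3
+if <(recognise text (answer) (label)) = [light_on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [light_off]> then
+broadcast (turn-lamp-off v)
+end
+```
+
+These sit inside the same `forever` loop, directly underneath the two `if` blocks that control the fan.
+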
+ +\--- collapse --- +--- +title: I can't see the blocks for light_on / light_off +--- + +If you have trained a new model, you will need to close Scratch and then re-open it from the Machine Learning for Kids website for any new blocks to appear. + +Click **Make** > **Scratch 3** > **Open in Scratch 3**. + +\--- /collapse --- + +\--- /task --- + +\--- task --- + +- Test whether your program works by typing in commands to turn the light on and off, and checking whether the outcome is as you expected. + +\--- /task --- + +\--- /challenge --- diff --git a/nl-NL/step_7.md b/nl-NL/step_7.md new file mode 100644 index 0000000..4ded462 --- /dev/null +++ b/nl-NL/step_7.md @@ -0,0 +1,39 @@ +## Hoe betrouwbaarheidsscores te gebruiken + +
+ + +The model can tell you how **confident** it is about whether it is correct. + +\--- task --- + +- Ga terug naar de pagina **Leer & Test** in de trainingstool. + +- Typ iets dat niets te maken heeft met lampen of ventilatoren in de testbox. For example, you could type in 'play some music'. + +![Result of entering "play some music" is fan on with 36% confidence](images/play-music.png) + +\--- /task --- + +The **confidence score** is the program’s way of telling you how likely it is that it has labelled the command correctly. + +\--- task --- + +- Return to Scratch. + +- Add some new code so that the assistant will tell you it didn't understand the command if the confidence score is less than 70%. + +![New Scratch code: If recognise text (answer) confidence < 70, say 'Sorry I didn't understand that' for 2 seconds](images/code-with-confidence.png) + +\--- /task --- + +\--- task --- + +- Klik op de groene vlag en test je programma om te controleren of jouw klassenassistent op de juiste manier reageert: + - Typ opdrachten in die niets met de ventilator of lamp te maken hebben + - Vraag om iets aan of uit te zetten + +\--- /task --- diff --git a/nl-NL/step_8.md b/nl-NL/step_8.md new file mode 100644 index 0000000..ef94fc7 --- /dev/null +++ b/nl-NL/step_8.md @@ -0,0 +1,3 @@ +## Uitdaging: meer items om te controleren + +There are lots of other machine learning and AI projects in the [Machine learning with Scratch](https://projects.raspberrypi.org/en/pathways/scratch-machine-learning) pathway. diff --git a/nl-NL/step_9.md b/nl-NL/step_9.md new file mode 100644 index 0000000..585a95c --- /dev/null +++ b/nl-NL/step_9.md @@ -0,0 +1,7 @@ +## Wat kun je verder nog doen? + +Probeer onze andere machine learning met Scratch-projecten als je dat nog niet hebt gedaan. 
+ +[Reis naar school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Vreemde taal](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/no-NO/images/banner.png b/no-NO/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/no-NO/images/banner.png differ diff --git a/no-NO/images/cheese-sandwich-annotated.png b/no-NO/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/no-NO/images/cheese-sandwich-annotated.png differ diff --git a/no-NO/images/cheese-sandwich.png b/no-NO/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/no-NO/images/cheese-sandwich.png differ diff --git a/no-NO/images/click-flag-annotated.png b/no-NO/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/no-NO/images/click-flag-annotated.png differ diff --git a/no-NO/images/click-flag.png b/no-NO/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/no-NO/images/click-flag.png differ diff --git a/no-NO/images/code-new-blocks.png b/no-NO/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/no-NO/images/code-new-blocks.png differ diff --git a/no-NO/images/code-with-confidence.png b/no-NO/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/no-NO/images/code-with-confidence.png differ diff --git a/no-NO/images/create-project-annotated.png b/no-NO/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/no-NO/images/create-project-annotated.png differ diff --git a/no-NO/images/create-project.png b/no-NO/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/no-NO/images/create-project.png differ diff --git a/no-NO/images/empty-buckets.png b/no-NO/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/no-NO/images/empty-buckets.png differ diff --git a/no-NO/images/full-buckets.png b/no-NO/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/no-NO/images/full-buckets.png differ diff --git a/no-NO/images/open-scratch-3-annotated.png b/no-NO/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/no-NO/images/open-scratch-3-annotated.png differ diff --git a/no-NO/images/open-scratch-3.png b/no-NO/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/no-NO/images/open-scratch-3.png differ diff --git a/no-NO/images/project-make-annotated.png b/no-NO/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/no-NO/images/project-make-annotated.png differ diff --git a/no-NO/images/project-make.png b/no-NO/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/no-NO/images/project-make.png differ diff --git a/no-NO/images/project-templates-annotated.png b/no-NO/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/no-NO/images/project-templates-annotated.png differ diff --git a/no-NO/images/project-templates.png b/no-NO/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/no-NO/images/project-templates.png differ diff --git a/no-NO/images/project-train-annotated.png b/no-NO/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/no-NO/images/project-train-annotated.png differ diff --git a/no-NO/images/projects-list-annotated.png b/no-NO/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/no-NO/images/projects-list-annotated.png differ diff --git a/no-NO/images/projects-list.png b/no-NO/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/no-NO/images/projects-list.png differ diff --git a/no-NO/images/scratch-template-annotated.png b/no-NO/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/no-NO/images/scratch-template-annotated.png differ diff --git a/no-NO/images/scratch-template.png b/no-NO/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/no-NO/images/scratch-template.png differ diff --git a/no-NO/images/smart-classroom.gif b/no-NO/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/no-NO/images/smart-classroom.gif differ diff --git a/no-NO/images/test-new-model-annotated.png b/no-NO/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/no-NO/images/test-new-model-annotated.png differ diff --git a/no-NO/images/test-with-new-blocks-annotated.png b/no-NO/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/no-NO/images/test-with-new-blocks-annotated.png differ diff --git a/no-NO/images/test-with-new-blocks.png b/no-NO/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/no-NO/images/test-with-new-blocks.png differ diff --git a/no-NO/images/train-new-model-annotated.png b/no-NO/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/no-NO/images/train-new-model-annotated.png differ diff --git a/no-NO/images/train-new-model.png b/no-NO/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/no-NO/images/train-new-model.png differ diff --git a/no-NO/images/what-you-will-make.png b/no-NO/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/no-NO/images/what-you-will-make.png differ diff --git a/no-NO/meta.yml b/no-NO/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/no-NO/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/no-NO/step_1.md b/no-NO/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/no-NO/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/no-NO/step_2.md b/no-NO/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/no-NO/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/no-NO/step_3.md b/no-NO/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/no-NO/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/no-NO/step_4.md b/no-NO/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/no-NO/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/no-NO/step_5.md b/no-NO/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/no-NO/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
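+
+As well as a label, the model reports a **confidence** score for each command, and a later step uses that score to make the assistant admit when it is unsure. As a rough sketch of that pattern only: the 70% threshold, the wording of the reply and the block and label names are taken from the later steps and the challenge, and may differ in your own project.
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (confidence)) < (70)> then
+say [Sorry, I'm not sure what you mean] for (2) seconds
+else
+if <(recognise text (answer) (label)) = [fan_on]> then
+broadcast (turn-fan-on v)
+end
+end
+end
+```
+
+Only the 'fan on' case is shown inside the `else`; the other labels would be handled in the same way.
+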
\ No newline at end of file diff --git a/no-NO/step_6.md b/no-NO/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/no-NO/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/no-NO/step_7.md b/no-NO/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/no-NO/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch.
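+Before you change anything, your script from the previous step should look roughly like the sketch below. This is only a rough outline, not the exact code: the label names assume that the training tool stored your buckets as fan_on, fan_off, lamp_on, and lamp_off, and the wording of the `recognise text … (label)` block may differ slightly in your version of the tool. If anything looks different, follow the screenshot of the finished code from the previous step.
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (label)) = [fan_on]> then
+broadcast (turn-fan-on v)
+end
+if <(recognise text (answer) (label)) = [fan_off]> then
+broadcast (turn-fan-off v)
+end
+if <(recognise text (answer) (label)) = [lamp_on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [lamp_off]> then
+broadcast (turn-lamp-off v)
+end
+end
+```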
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/no-NO/step_8.md b/no-NO/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/no-NO/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/no-NO/step_9.md b/no-NO/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/no-NO/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/pl-PL/images/banner.png b/pl-PL/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/pl-PL/images/banner.png differ diff --git a/pl-PL/images/cheese-sandwich-annotated.png b/pl-PL/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/pl-PL/images/cheese-sandwich-annotated.png differ diff --git a/pl-PL/images/cheese-sandwich.png b/pl-PL/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/pl-PL/images/cheese-sandwich.png differ diff --git a/pl-PL/images/click-flag-annotated.png b/pl-PL/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/pl-PL/images/click-flag-annotated.png differ diff --git a/pl-PL/images/click-flag.png b/pl-PL/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/pl-PL/images/click-flag.png differ diff --git a/pl-PL/images/code-new-blocks.png b/pl-PL/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/pl-PL/images/code-new-blocks.png differ diff --git a/pl-PL/images/code-with-confidence.png b/pl-PL/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/pl-PL/images/code-with-confidence.png differ diff --git a/pl-PL/images/create-project-annotated.png b/pl-PL/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/pl-PL/images/create-project-annotated.png differ diff --git a/pl-PL/images/create-project.png b/pl-PL/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/pl-PL/images/create-project.png differ diff --git a/pl-PL/images/empty-buckets.png b/pl-PL/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/pl-PL/images/empty-buckets.png differ diff --git a/pl-PL/images/full-buckets.png b/pl-PL/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/pl-PL/images/full-buckets.png differ diff --git a/pl-PL/images/open-scratch-3-annotated.png b/pl-PL/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/pl-PL/images/open-scratch-3-annotated.png differ diff --git a/pl-PL/images/open-scratch-3.png b/pl-PL/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/pl-PL/images/open-scratch-3.png differ diff --git a/pl-PL/images/project-make-annotated.png b/pl-PL/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/pl-PL/images/project-make-annotated.png differ diff --git a/pl-PL/images/project-make.png b/pl-PL/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/pl-PL/images/project-make.png differ diff --git a/pl-PL/images/project-templates-annotated.png b/pl-PL/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/pl-PL/images/project-templates-annotated.png differ diff --git a/pl-PL/images/project-templates.png b/pl-PL/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/pl-PL/images/project-templates.png differ diff --git a/pl-PL/images/project-train-annotated.png b/pl-PL/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/pl-PL/images/project-train-annotated.png differ diff --git a/pl-PL/images/projects-list-annotated.png b/pl-PL/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/pl-PL/images/projects-list-annotated.png differ diff --git a/pl-PL/images/projects-list.png b/pl-PL/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/pl-PL/images/projects-list.png differ diff --git a/pl-PL/images/scratch-template-annotated.png b/pl-PL/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/pl-PL/images/scratch-template-annotated.png differ diff --git a/pl-PL/images/scratch-template.png b/pl-PL/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/pl-PL/images/scratch-template.png differ diff --git a/pl-PL/images/smart-classroom.gif b/pl-PL/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/pl-PL/images/smart-classroom.gif differ diff --git a/pl-PL/images/test-new-model-annotated.png b/pl-PL/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/pl-PL/images/test-new-model-annotated.png differ diff --git a/pl-PL/images/test-with-new-blocks-annotated.png b/pl-PL/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/pl-PL/images/test-with-new-blocks-annotated.png differ diff --git a/pl-PL/images/test-with-new-blocks.png b/pl-PL/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/pl-PL/images/test-with-new-blocks.png differ diff --git a/pl-PL/images/train-new-model-annotated.png b/pl-PL/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/pl-PL/images/train-new-model-annotated.png differ diff --git a/pl-PL/images/train-new-model.png b/pl-PL/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/pl-PL/images/train-new-model.png differ diff --git a/pl-PL/images/what-you-will-make.png b/pl-PL/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/pl-PL/images/what-you-will-make.png differ diff --git a/pl-PL/meta.yml b/pl-PL/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/pl-PL/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/pl-PL/step_1.md b/pl-PL/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/pl-PL/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/pl-PL/step_2.md b/pl-PL/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/pl-PL/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/pl-PL/step_3.md b/pl-PL/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/pl-PL/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/pl-PL/step_4.md b/pl-PL/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/pl-PL/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/pl-PL/step_5.md b/pl-PL/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/pl-PL/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
\ No newline at end of file diff --git a/pl-PL/step_6.md b/pl-PL/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/pl-PL/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/pl-PL/step_7.md b/pl-PL/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/pl-PL/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch.
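+The next instruction shows the finished script in an annotated screenshot. As a rough guide only, wrapping the label checks in a confidence test might look something like the sketch below; the 70% threshold is just a starting value (the challenge step later asks you to experiment with it), the label names assume buckets stored as fan_on, fan_off, lamp_on, and lamp_off, and the exact block wording may differ in your version of the tool.
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (confidence)) > [70]> then
+if <(recognise text (answer) (label)) = [fan_on]> then
+broadcast (turn-fan-on v)
+end
+if <(recognise text (answer) (label)) = [fan_off]> then
+broadcast (turn-fan-off v)
+end
+if <(recognise text (answer) (label)) = [lamp_on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [lamp_off]> then
+broadcast (turn-lamp-off v)
+end
+else
+say [Sorry I’m not sure what you mean] for (2) seconds
+end
+end
+```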
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/pl-PL/step_8.md b/pl-PL/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/pl-PL/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/pl-PL/step_9.md b/pl-PL/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/pl-PL/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/pt-BR/images/banner.png b/pt-BR/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/pt-BR/images/banner.png differ diff --git a/pt-BR/images/cheese-sandwich-annotated.png b/pt-BR/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/pt-BR/images/cheese-sandwich-annotated.png differ diff --git a/pt-BR/images/cheese-sandwich.png b/pt-BR/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/pt-BR/images/cheese-sandwich.png differ diff --git a/pt-BR/images/click-flag-annotated.png b/pt-BR/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/pt-BR/images/click-flag-annotated.png differ diff --git a/pt-BR/images/click-flag.png b/pt-BR/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/pt-BR/images/click-flag.png differ diff --git a/pt-BR/images/code-new-blocks.png b/pt-BR/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/pt-BR/images/code-new-blocks.png differ diff --git a/pt-BR/images/code-with-confidence.png b/pt-BR/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/pt-BR/images/code-with-confidence.png differ diff --git a/pt-BR/images/create-project-annotated.png b/pt-BR/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/pt-BR/images/create-project-annotated.png differ diff --git a/pt-BR/images/create-project.png b/pt-BR/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/pt-BR/images/create-project.png differ diff --git a/pt-BR/images/empty-buckets.png b/pt-BR/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/pt-BR/images/empty-buckets.png differ diff --git a/pt-BR/images/full-buckets.png b/pt-BR/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/pt-BR/images/full-buckets.png differ diff --git a/pt-BR/images/open-scratch-3-annotated.png b/pt-BR/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/pt-BR/images/open-scratch-3-annotated.png differ diff --git a/pt-BR/images/open-scratch-3.png b/pt-BR/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/pt-BR/images/open-scratch-3.png differ diff --git a/pt-BR/images/project-make-annotated.png b/pt-BR/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/pt-BR/images/project-make-annotated.png differ diff --git a/pt-BR/images/project-make.png b/pt-BR/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/pt-BR/images/project-make.png differ diff --git a/pt-BR/images/project-templates-annotated.png b/pt-BR/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/pt-BR/images/project-templates-annotated.png differ diff --git a/pt-BR/images/project-templates.png b/pt-BR/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/pt-BR/images/project-templates.png differ diff --git a/pt-BR/images/project-train-annotated.png b/pt-BR/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/pt-BR/images/project-train-annotated.png differ diff --git a/pt-BR/images/projects-list-annotated.png b/pt-BR/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/pt-BR/images/projects-list-annotated.png differ diff --git a/pt-BR/images/projects-list.png b/pt-BR/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/pt-BR/images/projects-list.png differ diff --git a/pt-BR/images/scratch-template-annotated.png b/pt-BR/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/pt-BR/images/scratch-template-annotated.png differ diff --git a/pt-BR/images/scratch-template.png b/pt-BR/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/pt-BR/images/scratch-template.png differ diff --git a/pt-BR/images/smart-classroom.gif b/pt-BR/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/pt-BR/images/smart-classroom.gif differ diff --git a/pt-BR/images/test-new-model-annotated.png b/pt-BR/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/pt-BR/images/test-new-model-annotated.png differ diff --git a/pt-BR/images/test-with-new-blocks-annotated.png b/pt-BR/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/pt-BR/images/test-with-new-blocks-annotated.png differ diff --git a/pt-BR/images/test-with-new-blocks.png b/pt-BR/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/pt-BR/images/test-with-new-blocks.png differ diff --git a/pt-BR/images/train-new-model-annotated.png b/pt-BR/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/pt-BR/images/train-new-model-annotated.png differ diff --git a/pt-BR/images/train-new-model.png b/pt-BR/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/pt-BR/images/train-new-model.png differ diff --git a/pt-BR/images/what-you-will-make.png b/pt-BR/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/pt-BR/images/what-you-will-make.png differ diff --git a/pt-BR/meta.yml b/pt-BR/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/pt-BR/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/pt-BR/step_1.md b/pt-BR/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/pt-BR/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/pt-BR/step_2.md b/pt-BR/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/pt-BR/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/pt-BR/step_3.md b/pt-BR/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/pt-BR/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/pt-BR/step_4.md b/pt-BR/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/pt-BR/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/pt-BR/step_5.md b/pt-BR/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/pt-BR/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
\ No newline at end of file diff --git a/pt-BR/step_6.md b/pt-BR/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/pt-BR/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/pt-BR/step_7.md b/pt-BR/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/pt-BR/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch.
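+The key change in the next instruction is to check the confidence score before acting on the label, and to reply when the model is not sure. The skeleton below sketches the idea with just one label check shown (the other three follow the same pattern); the 70% threshold and the reply text are the values used in the challenge step, the label name assumes a bucket stored as fan_on, and the exact block wording may differ in your version of the tool.
+
+```blocks3
+if <(recognise text (answer) (confidence)) > [70]> then
+if <(recognise text (answer) (label)) = [fan_on]> then
+broadcast (turn-fan-on v)
+end
+else
+say [Sorry I’m not sure what you mean] for (2) seconds
+end
+```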
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/pt-BR/step_8.md b/pt-BR/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/pt-BR/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/pt-BR/step_9.md b/pt-BR/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/pt-BR/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/pt-PT/images/banner.png b/pt-PT/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/pt-PT/images/banner.png differ diff --git a/pt-PT/images/cheese-sandwich-annotated.png b/pt-PT/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/pt-PT/images/cheese-sandwich-annotated.png differ diff --git a/pt-PT/images/cheese-sandwich.png b/pt-PT/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/pt-PT/images/cheese-sandwich.png differ diff --git a/pt-PT/images/click-flag-annotated.png b/pt-PT/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/pt-PT/images/click-flag-annotated.png differ diff --git a/pt-PT/images/click-flag.png b/pt-PT/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/pt-PT/images/click-flag.png differ diff --git a/pt-PT/images/code-new-blocks.png b/pt-PT/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/pt-PT/images/code-new-blocks.png differ diff --git a/pt-PT/images/code-with-confidence.png b/pt-PT/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/pt-PT/images/code-with-confidence.png differ diff --git a/pt-PT/images/create-project-annotated.png b/pt-PT/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/pt-PT/images/create-project-annotated.png differ diff --git a/pt-PT/images/create-project.png b/pt-PT/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/pt-PT/images/create-project.png differ diff --git a/pt-PT/images/empty-buckets.png b/pt-PT/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/pt-PT/images/empty-buckets.png differ diff --git a/pt-PT/images/full-buckets.png b/pt-PT/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/pt-PT/images/full-buckets.png differ diff --git a/pt-PT/images/open-scratch-3-annotated.png b/pt-PT/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/pt-PT/images/open-scratch-3-annotated.png differ diff --git a/pt-PT/images/open-scratch-3.png b/pt-PT/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/pt-PT/images/open-scratch-3.png differ diff --git a/pt-PT/images/project-make-annotated.png b/pt-PT/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/pt-PT/images/project-make-annotated.png differ diff --git a/pt-PT/images/project-make.png b/pt-PT/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/pt-PT/images/project-make.png differ diff --git a/pt-PT/images/project-templates-annotated.png b/pt-PT/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/pt-PT/images/project-templates-annotated.png differ diff --git a/pt-PT/images/project-templates.png b/pt-PT/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/pt-PT/images/project-templates.png differ diff --git a/pt-PT/images/project-train-annotated.png b/pt-PT/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/pt-PT/images/project-train-annotated.png differ diff --git a/pt-PT/images/projects-list-annotated.png b/pt-PT/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/pt-PT/images/projects-list-annotated.png differ diff --git a/pt-PT/images/projects-list.png b/pt-PT/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/pt-PT/images/projects-list.png differ diff --git a/pt-PT/images/scratch-template-annotated.png b/pt-PT/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/pt-PT/images/scratch-template-annotated.png differ diff --git a/pt-PT/images/scratch-template.png b/pt-PT/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/pt-PT/images/scratch-template.png differ diff --git a/pt-PT/images/smart-classroom.gif b/pt-PT/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/pt-PT/images/smart-classroom.gif differ diff --git a/pt-PT/images/test-new-model-annotated.png b/pt-PT/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/pt-PT/images/test-new-model-annotated.png differ diff --git a/pt-PT/images/test-with-new-blocks-annotated.png b/pt-PT/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/pt-PT/images/test-with-new-blocks-annotated.png differ diff --git a/pt-PT/images/test-with-new-blocks.png b/pt-PT/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/pt-PT/images/test-with-new-blocks.png differ diff --git a/pt-PT/images/train-new-model-annotated.png b/pt-PT/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/pt-PT/images/train-new-model-annotated.png differ diff --git a/pt-PT/images/train-new-model.png b/pt-PT/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/pt-PT/images/train-new-model.png differ diff --git a/pt-PT/images/what-you-will-make.png b/pt-PT/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/pt-PT/images/what-you-will-make.png differ diff --git a/pt-PT/meta.yml b/pt-PT/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/pt-PT/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/pt-PT/step_1.md b/pt-PT/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/pt-PT/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/pt-PT/step_2.md b/pt-PT/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/pt-PT/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/pt-PT/step_3.md b/pt-PT/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/pt-PT/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/pt-PT/step_4.md b/pt-PT/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/pt-PT/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/pt-PT/step_5.md b/pt-PT/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/pt-PT/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
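+
+In the next step, you will swap the rule blocks from your first script for a new `recognise text … (label)` block that your project adds to Scratch, so the model (rather than a fixed list of phrases) decides which label a typed command belongs to. As a rough sketch only (the exact blocks are shown in the screenshot in the next step, and the label names below assume the fan_on, fan_off, lamp_on and lamp_off training buckets), the updated script could look something like this:
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (label)) = [fan_on]> then
+broadcast (turn-fan-on v)
+end
+if <(recognise text (answer) (label)) = [fan_off]> then
+broadcast (turn-fan-off v)
+end
+if <(recognise text (answer) (label)) = [lamp_on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [lamp_off]> then
+broadcast (turn-lamp-off v)
+end
+end
+```
+
+Instead of comparing the typed command against fixed phrases, this version asks your trained model for the best-matching label and then broadcasts the corresponding message.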
\ No newline at end of file diff --git a/pt-PT/step_6.md b/pt-PT/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/pt-PT/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/pt-PT/step_7.md b/pt-PT/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/pt-PT/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/pt-PT/step_8.md b/pt-PT/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/pt-PT/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/pt-PT/step_9.md b/pt-PT/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/pt-PT/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects. 
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/ro-RO/images/banner.png b/ro-RO/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/ro-RO/images/banner.png differ diff --git a/ro-RO/images/cheese-sandwich-annotated.png b/ro-RO/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/ro-RO/images/cheese-sandwich-annotated.png differ diff --git a/ro-RO/images/cheese-sandwich.png b/ro-RO/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/ro-RO/images/cheese-sandwich.png differ diff --git a/ro-RO/images/click-flag-annotated.png b/ro-RO/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/ro-RO/images/click-flag-annotated.png differ diff --git a/ro-RO/images/click-flag.png b/ro-RO/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/ro-RO/images/click-flag.png differ diff --git a/ro-RO/images/code-new-blocks.png b/ro-RO/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/ro-RO/images/code-new-blocks.png differ diff --git a/ro-RO/images/code-with-confidence.png b/ro-RO/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/ro-RO/images/code-with-confidence.png differ diff --git a/ro-RO/images/create-project-annotated.png b/ro-RO/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/ro-RO/images/create-project-annotated.png differ diff --git a/ro-RO/images/create-project.png b/ro-RO/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/ro-RO/images/create-project.png differ diff --git a/ro-RO/images/empty-buckets.png b/ro-RO/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/ro-RO/images/empty-buckets.png differ diff --git a/ro-RO/images/full-buckets.png b/ro-RO/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/ro-RO/images/full-buckets.png differ diff --git a/ro-RO/images/open-scratch-3-annotated.png b/ro-RO/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/ro-RO/images/open-scratch-3-annotated.png differ diff --git a/ro-RO/images/open-scratch-3.png b/ro-RO/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/ro-RO/images/open-scratch-3.png differ diff --git a/ro-RO/images/project-make-annotated.png b/ro-RO/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/ro-RO/images/project-make-annotated.png differ diff --git a/ro-RO/images/project-make.png b/ro-RO/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/ro-RO/images/project-make.png differ diff --git a/ro-RO/images/project-templates-annotated.png b/ro-RO/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/ro-RO/images/project-templates-annotated.png differ diff --git a/ro-RO/images/project-templates.png b/ro-RO/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/ro-RO/images/project-templates.png differ diff --git a/ro-RO/images/project-train-annotated.png b/ro-RO/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/ro-RO/images/project-train-annotated.png differ diff --git a/ro-RO/images/projects-list-annotated.png b/ro-RO/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/ro-RO/images/projects-list-annotated.png differ diff --git a/ro-RO/images/projects-list.png b/ro-RO/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/ro-RO/images/projects-list.png differ diff --git a/ro-RO/images/scratch-template-annotated.png b/ro-RO/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/ro-RO/images/scratch-template-annotated.png differ diff --git a/ro-RO/images/scratch-template.png b/ro-RO/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/ro-RO/images/scratch-template.png differ diff --git a/ro-RO/images/smart-classroom.gif b/ro-RO/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/ro-RO/images/smart-classroom.gif differ diff --git a/ro-RO/images/test-new-model-annotated.png b/ro-RO/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/ro-RO/images/test-new-model-annotated.png differ diff --git a/ro-RO/images/test-with-new-blocks-annotated.png b/ro-RO/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/ro-RO/images/test-with-new-blocks-annotated.png differ diff --git a/ro-RO/images/test-with-new-blocks.png b/ro-RO/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/ro-RO/images/test-with-new-blocks.png differ diff --git a/ro-RO/images/train-new-model-annotated.png b/ro-RO/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/ro-RO/images/train-new-model-annotated.png differ diff --git a/ro-RO/images/train-new-model.png b/ro-RO/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/ro-RO/images/train-new-model.png differ diff --git a/ro-RO/images/what-you-will-make.png b/ro-RO/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/ro-RO/images/what-you-will-make.png differ diff --git a/ro-RO/meta.yml b/ro-RO/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/ro-RO/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/ro-RO/step_1.md b/ro-RO/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/ro-RO/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/ro-RO/step_2.md b/ro-RO/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/ro-RO/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/ro-RO/step_3.md b/ro-RO/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/ro-RO/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/ro-RO/step_4.md b/ro-RO/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/ro-RO/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/ro-RO/step_5.md b/ro-RO/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/ro-RO/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
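+
+In the next step, you will swap the rule blocks from your first script for a new `recognise text … (label)` block that your project adds to Scratch, so the model (rather than a fixed list of phrases) decides which label a typed command belongs to. As a rough sketch only (the exact blocks are shown in the screenshot in the next step, and the label names below assume the fan_on, fan_off, lamp_on and lamp_off training buckets), the updated script could look something like this:
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (label)) = [fan_on]> then
+broadcast (turn-fan-on v)
+end
+if <(recognise text (answer) (label)) = [fan_off]> then
+broadcast (turn-fan-off v)
+end
+if <(recognise text (answer) (label)) = [lamp_on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [lamp_off]> then
+broadcast (turn-lamp-off v)
+end
+end
+```
+
+Instead of comparing the typed command against fixed phrases, this version asks your trained model for the best-matching label and then broadcasts the corresponding message.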
\ No newline at end of file diff --git a/ro-RO/step_6.md b/ro-RO/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/ro-RO/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated.png) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/ro-RO/step_7.md b/ro-RO/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/ro-RO/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/ro-RO/step_8.md b/ro-RO/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/ro-RO/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/ro-RO/step_9.md b/ro-RO/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/ro-RO/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects. 
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/ru-RU/images/banner.png b/ru-RU/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/ru-RU/images/banner.png differ diff --git a/ru-RU/images/cheese-sandwich-annotated.png b/ru-RU/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/ru-RU/images/cheese-sandwich-annotated.png differ diff --git a/ru-RU/images/cheese-sandwich.png b/ru-RU/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/ru-RU/images/cheese-sandwich.png differ diff --git a/ru-RU/images/click-flag-annotated.png b/ru-RU/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/ru-RU/images/click-flag-annotated.png differ diff --git a/ru-RU/images/click-flag.png b/ru-RU/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/ru-RU/images/click-flag.png differ diff --git a/ru-RU/images/code-new-blocks.png b/ru-RU/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/ru-RU/images/code-new-blocks.png differ diff --git a/ru-RU/images/code-with-confidence.png b/ru-RU/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/ru-RU/images/code-with-confidence.png differ diff --git a/ru-RU/images/create-project-annotated.png b/ru-RU/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/ru-RU/images/create-project-annotated.png differ diff --git a/ru-RU/images/create-project.png b/ru-RU/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/ru-RU/images/create-project.png differ diff --git a/ru-RU/images/empty-buckets.png b/ru-RU/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/ru-RU/images/empty-buckets.png differ diff --git a/ru-RU/images/full-buckets.png b/ru-RU/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/ru-RU/images/full-buckets.png differ diff --git a/ru-RU/images/open-scratch-3-annotated.png b/ru-RU/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/ru-RU/images/open-scratch-3-annotated.png differ diff --git a/ru-RU/images/open-scratch-3.png b/ru-RU/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/ru-RU/images/open-scratch-3.png differ diff --git a/ru-RU/images/project-make-annotated.png b/ru-RU/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/ru-RU/images/project-make-annotated.png differ diff --git a/ru-RU/images/project-make.png b/ru-RU/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/ru-RU/images/project-make.png differ diff --git a/ru-RU/images/project-templates-annotated.png b/ru-RU/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/ru-RU/images/project-templates-annotated.png differ diff --git a/ru-RU/images/project-templates.png b/ru-RU/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/ru-RU/images/project-templates.png differ diff --git a/ru-RU/images/project-train-annotated.png b/ru-RU/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/ru-RU/images/project-train-annotated.png differ diff --git a/ru-RU/images/projects-list-annotated.png b/ru-RU/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/ru-RU/images/projects-list-annotated.png differ diff --git a/ru-RU/images/projects-list.png b/ru-RU/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/ru-RU/images/projects-list.png differ diff --git a/ru-RU/images/scratch-template-annotated.png b/ru-RU/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/ru-RU/images/scratch-template-annotated.png differ diff --git a/ru-RU/images/scratch-template.png b/ru-RU/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/ru-RU/images/scratch-template.png differ diff --git a/ru-RU/images/smart-classroom.gif b/ru-RU/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/ru-RU/images/smart-classroom.gif differ diff --git a/ru-RU/images/test-new-model-annotated.png b/ru-RU/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/ru-RU/images/test-new-model-annotated.png differ diff --git a/ru-RU/images/test-with-new-blocks-annotated.png b/ru-RU/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/ru-RU/images/test-with-new-blocks-annotated.png differ diff --git a/ru-RU/images/test-with-new-blocks.png b/ru-RU/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/ru-RU/images/test-with-new-blocks.png differ diff --git a/ru-RU/images/train-new-model-annotated.png b/ru-RU/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/ru-RU/images/train-new-model-annotated.png differ diff --git a/ru-RU/images/train-new-model.png b/ru-RU/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/ru-RU/images/train-new-model.png differ diff --git a/ru-RU/images/what-you-will-make.png b/ru-RU/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/ru-RU/images/what-you-will-make.png differ diff --git a/ru-RU/meta.yml b/ru-RU/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/ru-RU/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/ru-RU/step_1.md b/ru-RU/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/ru-RU/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/ru-RU/step_2.md b/ru-RU/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/ru-RU/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/ru-RU/step_3.md b/ru-RU/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/ru-RU/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/ru-RU/step_4.md b/ru-RU/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/ru-RU/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/ru-RU/step_5.md b/ru-RU/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/ru-RU/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
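+
+In the next step, you will swap the rule blocks from your first script for a new `recognise text … (label)` block that your project adds to Scratch, so the model (rather than a fixed list of phrases) decides which label a typed command belongs to. As a rough sketch only (the exact blocks are shown in the screenshot in the next step, and the label names below assume the fan_on, fan_off, lamp_on and lamp_off training buckets), the updated script could look something like this:
+
+```blocks3
+when green flag clicked
+forever
+ask [Enter your command] and wait
+if <(recognise text (answer) (label)) = [fan_on]> then
+broadcast (turn-fan-on v)
+end
+if <(recognise text (answer) (label)) = [fan_off]> then
+broadcast (turn-fan-off v)
+end
+if <(recognise text (answer) (label)) = [lamp_on]> then
+broadcast (turn-lamp-on v)
+end
+if <(recognise text (answer) (label)) = [lamp_off]> then
+broadcast (turn-lamp-off v)
+end
+end
+```
+
+Instead of comparing the typed command against fixed phrases, this version asks your trained model for the best-matching label and then broadcasts the corresponding message.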
\ No newline at end of file diff --git a/ru-RU/step_6.md b/ru-RU/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/ru-RU/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/ru-RU/step_7.md b/ru-RU/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/ru-RU/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ Modify the script for the 'classroom' sprite so that it uses the confidence score:

![New code to be added into the Scratch program](images/code-with-confidence.png)

+ Click the green flag and test your program to check that your classroom assistant reacts in the right way:
  + Type in commands that have nothing to do with the fan or lamp
  + Ask for something to be turned on or off

Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command.

\--- /task \---

You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google Home).

Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the higher its confidence scores become. \ No newline at end of file diff --git a/ru-RU/step_8.md b/ru-RU/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/ru-RU/step_8.md @@ -0,0 +1,35 @@
## Challenge: more items to control

\--- challenge \---

\--- task \---

**Add another item**

+ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling it?

\--- /task \---

\--- task \---

**Try out different confidence scores**

+ Is 70% the right confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model.

If you choose a number that is too high, the assistant will say “Sorry, I’m not sure what you mean” too often.

If you choose a number that is too low, the assistant will get too many things wrong.

\--- /task \---

\--- task \---

**Real smart assistants**

People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}.

People make these assistants the same way that you made yours:
1. First, they create buckets for the types of commands they want their assistants to recognise
1. Then they collect examples of how the commands might be phrased, and train the Alexa-based assistant to understand them

+ Find an Alexa Skill that interests you and look at the commands it can understand. How would you have trained this program?

\--- /task \---

\--- /challenge \--- diff --git a/ru-RU/step_9.md b/ru-RU/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/ru-RU/step_9.md @@ -0,0 +1,7 @@
## What next?

If you haven't already, try our other machine learning with Scratch projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/sr-SP/images/banner.png b/sr-SP/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/sr-SP/images/banner.png differ diff --git a/sr-SP/images/cheese-sandwich-annotated.png b/sr-SP/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/sr-SP/images/cheese-sandwich-annotated.png differ diff --git a/sr-SP/images/cheese-sandwich.png b/sr-SP/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/sr-SP/images/cheese-sandwich.png differ diff --git a/sr-SP/images/click-flag-annotated.png b/sr-SP/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/sr-SP/images/click-flag-annotated.png differ diff --git a/sr-SP/images/click-flag.png b/sr-SP/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/sr-SP/images/click-flag.png differ diff --git a/sr-SP/images/code-new-blocks.png b/sr-SP/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/sr-SP/images/code-new-blocks.png differ diff --git a/sr-SP/images/code-with-confidence.png b/sr-SP/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/sr-SP/images/code-with-confidence.png differ diff --git a/sr-SP/images/create-project-annotated.png b/sr-SP/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/sr-SP/images/create-project-annotated.png differ diff --git a/sr-SP/images/create-project.png b/sr-SP/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/sr-SP/images/create-project.png differ diff --git a/sr-SP/images/empty-buckets.png b/sr-SP/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/sr-SP/images/empty-buckets.png differ diff --git a/sr-SP/images/full-buckets.png b/sr-SP/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/sr-SP/images/full-buckets.png differ diff --git a/sr-SP/images/open-scratch-3-annotated.png b/sr-SP/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/sr-SP/images/open-scratch-3-annotated.png differ diff --git a/sr-SP/images/open-scratch-3.png b/sr-SP/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/sr-SP/images/open-scratch-3.png differ diff --git a/sr-SP/images/project-make-annotated.png b/sr-SP/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/sr-SP/images/project-make-annotated.png differ diff --git a/sr-SP/images/project-make.png b/sr-SP/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/sr-SP/images/project-make.png differ diff --git a/sr-SP/images/project-templates-annotated.png b/sr-SP/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/sr-SP/images/project-templates-annotated.png differ diff --git a/sr-SP/images/project-templates.png b/sr-SP/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/sr-SP/images/project-templates.png differ diff --git a/sr-SP/images/project-train-annotated.png b/sr-SP/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/sr-SP/images/project-train-annotated.png differ diff --git a/sr-SP/images/projects-list-annotated.png b/sr-SP/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/sr-SP/images/projects-list-annotated.png differ diff --git a/sr-SP/images/projects-list.png b/sr-SP/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/sr-SP/images/projects-list.png differ diff --git a/sr-SP/images/scratch-template-annotated.png b/sr-SP/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/sr-SP/images/scratch-template-annotated.png differ diff --git a/sr-SP/images/scratch-template.png b/sr-SP/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/sr-SP/images/scratch-template.png differ diff --git a/sr-SP/images/smart-classroom.gif b/sr-SP/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/sr-SP/images/smart-classroom.gif differ diff --git a/sr-SP/images/test-new-model-annotated.png b/sr-SP/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/sr-SP/images/test-new-model-annotated.png differ diff --git a/sr-SP/images/test-with-new-blocks-annotated.png b/sr-SP/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/sr-SP/images/test-with-new-blocks-annotated.png differ diff --git a/sr-SP/images/test-with-new-blocks.png b/sr-SP/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/sr-SP/images/test-with-new-blocks.png differ diff --git a/sr-SP/images/train-new-model-annotated.png b/sr-SP/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/sr-SP/images/train-new-model-annotated.png differ diff --git a/sr-SP/images/train-new-model.png b/sr-SP/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/sr-SP/images/train-new-model.png differ diff --git a/sr-SP/images/what-you-will-make.png b/sr-SP/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/sr-SP/images/what-you-will-make.png differ diff --git a/sr-SP/meta.yml b/sr-SP/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/sr-SP/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/sr-SP/step_1.md b/sr-SP/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/sr-SP/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/sr-SP/step_2.md b/sr-SP/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/sr-SP/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/sr-SP/step_3.md b/sr-SP/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/sr-SP/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/sr-SP/step_4.md b/sr-SP/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/sr-SP/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
\--- /task \---

\--- task \---

+ Keep clicking **Add example** until you have at least **six** examples in **each** bucket.

Be imaginative! Try to think of lots of different ways to phrase each command. For example:

+ For “fan on”, you could complain that you’re too hot.
+ For “fan off”, you could complain that it’s too breezy.
+ For “lamp on”, you could complain that you can’t see.
+ For “lamp off”, you could complain that it’s too bright.

![The four buckets named fan_on, fan_off, lamp_on and lamp_off, now filled with example commands](images/full-buckets.png)

\--- collapse \---
---
title: Tips for selecting good examples
---
+ **More is good**: the more examples you give your program, the better the program should get at recognising your commands.

+ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands.

+ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones.

\--- /collapse \---

\--- /task \---

In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/sr-SP/step_5.md b/sr-SP/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/sr-SP/step_5.md @@ -0,0 +1,28 @@
## Train and test a machine learning model

You will now train the program using the examples, and then test it.

The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands.

\--- task \---
+ Click on the **< Back to project** link, then click on **Learn & Test**.

+ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples.

![Annotation pointing to the Train new machine learning model button](images/train-new-model-annotated.png)

\--- /task \---

Wait for the training to complete. This might take a minute or two. While you wait, complete the machine learning multiple-choice quiz at the bottom of the page.

\--- task \---

Once the training has completed, a test box appears. Test your machine learning model to see what it has learned.

+ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised.

+ Type in **commands that are not in the buckets**.

If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then click on **Train new machine learning model** again.

![Annotation pointing to the box for testing the new machine learning model](images/test-new-model-annotated.png)

\--- /task \---

Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**.

Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**.
\ No newline at end of file diff --git a/sr-SP/step_6.md b/sr-SP/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/sr-SP/step_6.md @@ -0,0 +1,47 @@
## Use the machine learning model in Scratch

Now update your Scratch program to use your machine learning model instead of the rules-based approach.

\--- task \---
+ Click on the **< Back to project** link.

+ Click on **Make**.

+ Click on **Scratch 3**.

+ Read the instructions on the page to learn how to use machine learning blocks in Scratch.

+ Click on **Open in Scratch 3**.

![Annotation pointing at the Open in Scratch 3 button](images/open-scratch-3-annotated.png)

+ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier.

+ When Scratch asks you whether to replace the current project, click on **OK**.

\--- /task \---

\--- task \---

+ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added.

The `recognise text … (label)` block is a new block added by your project. It takes the text you give it and returns one of the four labels, based on the machine learning model you have trained.

![New Scratch code including the new machine learning blocks](images/code-new-blocks.png)

\--- /task \---

\--- task \---
+ Click the **green flag** to test again.

![Testing the new code from the previous instruction](images/test-with-new-blocks-annotated.png)

+ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command.

Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.**

+ Save your project: click on **File** and then on **Save to your computer**.

\--- /task \---

Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach.

Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command.

The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/sr-SP/step_7.md b/sr-SP/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/sr-SP/step_7.md @@ -0,0 +1,36 @@
## How to use confidence scores

Finally, you will learn what confidence scores mean and how you should use them.

\--- task \---
+ Leave Scratch open, because you will come back to it in a moment.

+ Go back to the **Learn & Test** page in the Training tool.

+ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png)

+ Look at the confidence score, which should be very low.

+ Compare this with the confidence score you get for a command such as “turn on the lamp”.

**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low.

\--- /task \---

\--- task \---

+ Go back to your classroom assistant project in Scratch.
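You are now going to rework the script so that it only obeys a command when the model is reasonably confident about it. As a rough sketch only (the exact blocks to use are shown in the image in the next instruction), the reworked script could look something like this. The sketch assumes that, alongside `recognise text … (label)`, your project also adds a `recognise text … (confidence)` reporter block, that your labels appear as fan_on, fan_off, lamp_on and lamp_off (as in the screenshots), and that you start from the 70% threshold mentioned in the challenge step:

```blocks3
when green flag clicked
forever
ask [Enter your command] and wait
if <(recognise text (answer) (confidence)) > [70]> then // only act when the model is confident
if <(recognise text (answer) (label)) = [fan_on]> then
broadcast (turn-fan-on v)
end
if <(recognise text (answer) (label)) = [fan_off]> then
broadcast (turn-fan-off v)
end
if <(recognise text (answer) (label)) = [lamp_on]> then
broadcast (turn-lamp-on v)
end
if <(recognise text (answer) (label)) = [lamp_off]> then
broadcast (turn-lamp-off v)
end
else
say [Sorry, I'm not sure what you mean] for (2) seconds // low confidence: say you are not sure instead of guessing
end
end
```

A tidier version could store the label and the confidence in variables at the start of the loop, so that the model is only asked once per command, but the shape above is easier to follow.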
+ Modify the script for the 'classroom' sprite so that it uses the confidence score:

![New code to be added into the Scratch program](images/code-with-confidence.png)

+ Click the green flag and test your program to check that your classroom assistant reacts in the right way:
  + Type in commands that have nothing to do with the fan or lamp
  + Ask for something to be turned on or off

Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command.

\--- /task \---

You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google Home).

Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the higher its confidence scores become. \ No newline at end of file diff --git a/sr-SP/step_8.md b/sr-SP/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/sr-SP/step_8.md @@ -0,0 +1,35 @@
## Challenge: more items to control

\--- challenge \---

\--- task \---

**Add another item**

+ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling it?

\--- /task \---

\--- task \---

**Try out different confidence scores**

+ Is 70% the right confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model.

If you choose a number that is too high, the assistant will say “Sorry, I’m not sure what you mean” too often.

If you choose a number that is too low, the assistant will get too many things wrong.

\--- /task \---

\--- task \---

**Real smart assistants**

People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}.

People make these assistants the same way that you made yours:
1. First, they create buckets for the types of commands they want their assistants to recognise
1. Then they collect examples of how the commands might be phrased, and train the Alexa-based assistant to understand them

+ Find an Alexa Skill that interests you and look at the commands it can understand. How would you have trained this program?

\--- /task \---

\--- /challenge \--- diff --git a/sr-SP/step_9.md b/sr-SP/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/sr-SP/step_9.md @@ -0,0 +1,7 @@
## What next?

If you haven't already, try our other machine learning with Scratch projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/sv-SE/images/banner.png b/sv-SE/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/sv-SE/images/banner.png differ diff --git a/sv-SE/images/cheese-sandwich-annotated.png b/sv-SE/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/sv-SE/images/cheese-sandwich-annotated.png differ diff --git a/sv-SE/images/cheese-sandwich.png b/sv-SE/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/sv-SE/images/cheese-sandwich.png differ diff --git a/sv-SE/images/click-flag-annotated.png b/sv-SE/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/sv-SE/images/click-flag-annotated.png differ diff --git a/sv-SE/images/click-flag.png b/sv-SE/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/sv-SE/images/click-flag.png differ diff --git a/sv-SE/images/code-new-blocks.png b/sv-SE/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/sv-SE/images/code-new-blocks.png differ diff --git a/sv-SE/images/code-with-confidence.png b/sv-SE/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/sv-SE/images/code-with-confidence.png differ diff --git a/sv-SE/images/create-project-annotated.png b/sv-SE/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/sv-SE/images/create-project-annotated.png differ diff --git a/sv-SE/images/create-project.png b/sv-SE/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/sv-SE/images/create-project.png differ diff --git a/sv-SE/images/empty-buckets.png b/sv-SE/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/sv-SE/images/empty-buckets.png differ diff --git a/sv-SE/images/full-buckets.png b/sv-SE/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/sv-SE/images/full-buckets.png differ diff --git a/sv-SE/images/open-scratch-3-annotated.png b/sv-SE/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/sv-SE/images/open-scratch-3-annotated.png differ diff --git a/sv-SE/images/open-scratch-3.png b/sv-SE/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/sv-SE/images/open-scratch-3.png differ diff --git a/sv-SE/images/project-make-annotated.png b/sv-SE/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/sv-SE/images/project-make-annotated.png differ diff --git a/sv-SE/images/project-make.png b/sv-SE/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/sv-SE/images/project-make.png differ diff --git a/sv-SE/images/project-templates-annotated.png b/sv-SE/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/sv-SE/images/project-templates-annotated.png differ diff --git a/sv-SE/images/project-templates.png b/sv-SE/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/sv-SE/images/project-templates.png differ diff --git a/sv-SE/images/project-train-annotated.png b/sv-SE/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/sv-SE/images/project-train-annotated.png differ diff --git a/sv-SE/images/projects-list-annotated.png b/sv-SE/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/sv-SE/images/projects-list-annotated.png differ diff --git a/sv-SE/images/projects-list.png b/sv-SE/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/sv-SE/images/projects-list.png differ diff --git a/sv-SE/images/scratch-template-annotated.png b/sv-SE/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/sv-SE/images/scratch-template-annotated.png differ diff --git a/sv-SE/images/scratch-template.png b/sv-SE/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/sv-SE/images/scratch-template.png differ diff --git a/sv-SE/images/smart-classroom.gif b/sv-SE/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/sv-SE/images/smart-classroom.gif differ diff --git a/sv-SE/images/test-new-model-annotated.png b/sv-SE/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/sv-SE/images/test-new-model-annotated.png differ diff --git a/sv-SE/images/test-with-new-blocks-annotated.png b/sv-SE/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/sv-SE/images/test-with-new-blocks-annotated.png differ diff --git a/sv-SE/images/test-with-new-blocks.png b/sv-SE/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/sv-SE/images/test-with-new-blocks.png differ diff --git a/sv-SE/images/train-new-model-annotated.png b/sv-SE/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/sv-SE/images/train-new-model-annotated.png differ diff --git a/sv-SE/images/train-new-model.png b/sv-SE/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/sv-SE/images/train-new-model.png differ diff --git a/sv-SE/images/what-you-will-make.png b/sv-SE/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/sv-SE/images/what-you-will-make.png differ diff --git a/sv-SE/meta.yml b/sv-SE/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/sv-SE/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/sv-SE/step_1.md b/sv-SE/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/sv-SE/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/sv-SE/step_2.md b/sv-SE/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/sv-SE/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/sv-SE/step_3.md b/sv-SE/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/sv-SE/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/sv-SE/step_4.md b/sv-SE/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/sv-SE/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
\--- /task \---

\--- task \---

+ Keep clicking **Add example** until you have at least **six** examples in **each** bucket.

Be imaginative! Try to think of lots of different ways to phrase each command. For example:

+ For “fan on”, you could complain that you’re too hot.
+ For “fan off”, you could complain that it’s too breezy.
+ For “lamp on”, you could complain that you can’t see.
+ For “lamp off”, you could complain that it’s too bright.

![The four buckets named fan_on, fan_off, lamp_on and lamp_off, now filled with example commands](images/full-buckets.png)

\--- collapse \---
---
title: Tips for selecting good examples
---
+ **More is good**: the more examples you give your program, the better the program should get at recognising your commands.

+ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands.

+ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones.

\--- /collapse \---

\--- /task \---

In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/sv-SE/step_5.md b/sv-SE/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/sv-SE/step_5.md @@ -0,0 +1,28 @@
## Train and test a machine learning model

You will now train the program using the examples, and then test it.

The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands.

\--- task \---
+ Click on the **< Back to project** link, then click on **Learn & Test**.

+ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples.

![Annotation pointing to the Train new machine learning model button](images/train-new-model-annotated.png)

\--- /task \---

Wait for the training to complete. This might take a minute or two. While you wait, complete the machine learning multiple-choice quiz at the bottom of the page.

\--- task \---

Once the training has completed, a test box appears. Test your machine learning model to see what it has learned.

+ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised.

+ Type in **commands that are not in the buckets**.

If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then click on **Train new machine learning model** again.

![Annotation pointing to the box for testing the new machine learning model](images/test-new-model-annotated.png)

\--- /task \---

Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**.

Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**.
\ No newline at end of file diff --git a/sv-SE/step_6.md b/sv-SE/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/sv-SE/step_6.md @@ -0,0 +1,47 @@
## Use the machine learning model in Scratch

Now update your Scratch program to use your machine learning model instead of the rules-based approach.

\--- task \---
+ Click on the **< Back to project** link.

+ Click on **Make**.

+ Click on **Scratch 3**.

+ Read the instructions on the page to learn how to use machine learning blocks in Scratch.

+ Click on **Open in Scratch 3**.

![Annotation pointing at the Open in Scratch 3 button](images/open-scratch-3-annotated.png)

+ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier.

+ When Scratch asks you whether to replace the current project, click on **OK**.

\--- /task \---

\--- task \---

+ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added.

The `recognise text … (label)` block is a new block added by your project. It takes the text you give it and returns one of the four labels, based on the machine learning model you have trained.

![New Scratch code including the new machine learning blocks](images/code-new-blocks.png)

\--- /task \---

\--- task \---
+ Click the **green flag** to test again.

![Testing the new code from the previous instruction](images/test-with-new-blocks-annotated.png)

+ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command.

Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.**

+ Save your project: click on **File** and then on **Save to your computer**.

\--- /task \---

Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach.

Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command.

The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/sv-SE/step_7.md b/sv-SE/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/sv-SE/step_7.md @@ -0,0 +1,36 @@
## How to use confidence scores

Finally, you will learn what confidence scores mean and how you should use them.

\--- task \---
+ Leave Scratch open, because you will come back to it in a moment.

+ Go back to the **Learn & Test** page in the Training tool.

+ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png)

+ Look at the confidence score, which should be very low.

+ Compare this with the confidence score you get for a command such as “turn on the lamp”.

**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low.

\--- /task \---

\--- task \---

+ Go back to your classroom assistant project in Scratch.
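You are now going to rework the script so that it only obeys a command when the model is reasonably confident about it. As a rough sketch only (the exact blocks to use are shown in the image in the next instruction), the reworked script could look something like this. The sketch assumes that, alongside `recognise text … (label)`, your project also adds a `recognise text … (confidence)` reporter block, that your labels appear as fan_on, fan_off, lamp_on and lamp_off (as in the screenshots), and that you start from the 70% threshold mentioned in the challenge step:

```blocks3
when green flag clicked
forever
ask [Enter your command] and wait
if <(recognise text (answer) (confidence)) > [70]> then // only act when the model is confident
if <(recognise text (answer) (label)) = [fan_on]> then
broadcast (turn-fan-on v)
end
if <(recognise text (answer) (label)) = [fan_off]> then
broadcast (turn-fan-off v)
end
if <(recognise text (answer) (label)) = [lamp_on]> then
broadcast (turn-lamp-on v)
end
if <(recognise text (answer) (label)) = [lamp_off]> then
broadcast (turn-lamp-off v)
end
else
say [Sorry, I'm not sure what you mean] for (2) seconds // low confidence: say you are not sure instead of guessing
end
end
```

A tidier version could store the label and the confidence in variables at the start of the loop, so that the model is only asked once per command, but the shape above is easier to follow.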
+ Modify the script for the 'classroom' sprite so that it uses the confidence score:

![New code to be added into the Scratch program](images/code-with-confidence.png)

+ Click the green flag and test your program to check that your classroom assistant reacts in the right way:
  + Type in commands that have nothing to do with the fan or lamp
  + Ask for something to be turned on or off

Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command.

\--- /task \---

You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google Home).

Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the higher its confidence scores become. \ No newline at end of file diff --git a/sv-SE/step_8.md b/sv-SE/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/sv-SE/step_8.md @@ -0,0 +1,35 @@
## Challenge: more items to control

\--- challenge \---

\--- task \---

**Add another item**

+ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling it?

\--- /task \---

\--- task \---

**Try out different confidence scores**

+ Is 70% the right confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model.

If you choose a number that is too high, the assistant will say “Sorry, I’m not sure what you mean” too often.

If you choose a number that is too low, the assistant will get too many things wrong.

\--- /task \---

\--- task \---

**Real smart assistants**

People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}.

People make these assistants the same way that you made yours:
1. First, they create buckets for the types of commands they want their assistants to recognise
1. Then they collect examples of how the commands might be phrased, and train the Alexa-based assistant to understand them

+ Find an Alexa Skill that interests you and look at the commands it can understand. How would you have trained this program?

\--- /task \---

\--- /challenge \--- diff --git a/sv-SE/step_9.md b/sv-SE/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/sv-SE/step_9.md @@ -0,0 +1,7 @@
## What next?

If you haven't already, try our other machine learning with Scratch projects.
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/tr-TR/images/banner.png b/tr-TR/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/tr-TR/images/banner.png differ diff --git a/tr-TR/images/cheese-sandwich-annotated.png b/tr-TR/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/tr-TR/images/cheese-sandwich-annotated.png differ diff --git a/tr-TR/images/cheese-sandwich.png b/tr-TR/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/tr-TR/images/cheese-sandwich.png differ diff --git a/tr-TR/images/click-flag-annotated.png b/tr-TR/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/tr-TR/images/click-flag-annotated.png differ diff --git a/tr-TR/images/click-flag.png b/tr-TR/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/tr-TR/images/click-flag.png differ diff --git a/tr-TR/images/code-new-blocks.png b/tr-TR/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/tr-TR/images/code-new-blocks.png differ diff --git a/tr-TR/images/code-with-confidence.png b/tr-TR/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/tr-TR/images/code-with-confidence.png differ diff --git a/tr-TR/images/create-project-annotated.png b/tr-TR/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/tr-TR/images/create-project-annotated.png differ diff --git a/tr-TR/images/create-project.png b/tr-TR/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/tr-TR/images/create-project.png differ diff --git a/tr-TR/images/empty-buckets.png b/tr-TR/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/tr-TR/images/empty-buckets.png differ diff --git a/tr-TR/images/full-buckets.png b/tr-TR/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/tr-TR/images/full-buckets.png differ diff --git a/tr-TR/images/open-scratch-3-annotated.png b/tr-TR/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/tr-TR/images/open-scratch-3-annotated.png differ diff --git a/tr-TR/images/open-scratch-3.png b/tr-TR/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/tr-TR/images/open-scratch-3.png differ diff --git a/tr-TR/images/project-make-annotated.png b/tr-TR/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/tr-TR/images/project-make-annotated.png differ diff --git a/tr-TR/images/project-make.png b/tr-TR/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/tr-TR/images/project-make.png differ diff --git a/tr-TR/images/project-templates-annotated.png b/tr-TR/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/tr-TR/images/project-templates-annotated.png differ diff --git a/tr-TR/images/project-templates.png b/tr-TR/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/tr-TR/images/project-templates.png differ diff --git a/tr-TR/images/project-train-annotated.png b/tr-TR/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/tr-TR/images/project-train-annotated.png differ diff --git a/tr-TR/images/projects-list-annotated.png b/tr-TR/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/tr-TR/images/projects-list-annotated.png differ diff --git a/tr-TR/images/projects-list.png b/tr-TR/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/tr-TR/images/projects-list.png differ diff --git a/tr-TR/images/scratch-template-annotated.png b/tr-TR/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/tr-TR/images/scratch-template-annotated.png differ diff --git a/tr-TR/images/scratch-template.png b/tr-TR/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/tr-TR/images/scratch-template.png differ diff --git a/tr-TR/images/smart-classroom.gif b/tr-TR/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/tr-TR/images/smart-classroom.gif differ diff --git a/tr-TR/images/test-new-model-annotated.png b/tr-TR/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/tr-TR/images/test-new-model-annotated.png differ diff --git a/tr-TR/images/test-with-new-blocks-annotated.png b/tr-TR/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/tr-TR/images/test-with-new-blocks-annotated.png differ diff --git a/tr-TR/images/test-with-new-blocks.png b/tr-TR/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/tr-TR/images/test-with-new-blocks.png differ diff --git a/tr-TR/images/train-new-model-annotated.png b/tr-TR/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/tr-TR/images/train-new-model-annotated.png differ diff --git a/tr-TR/images/train-new-model.png b/tr-TR/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/tr-TR/images/train-new-model.png differ diff --git a/tr-TR/images/what-you-will-make.png b/tr-TR/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/tr-TR/images/what-you-will-make.png differ diff --git a/tr-TR/meta.yml b/tr-TR/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/tr-TR/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/tr-TR/step_1.md b/tr-TR/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/tr-TR/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/tr-TR/step_2.md b/tr-TR/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/tr-TR/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/tr-TR/step_3.md b/tr-TR/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/tr-TR/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/tr-TR/step_4.md b/tr-TR/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/tr-TR/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/tr-TR/step_5.md b/tr-TR/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/tr-TR/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
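In the next step you will call this trained model from your Scratch script. As a rough preview of the idea (this is only a sketch; the exact blocks are shown in the next step, and your label names may differ), an exact-match rule is replaced by a check on the label that the model returns:

```blocks3
if <(answer) = [Turn on the fan]> then // old rule: only reacts to one exact phrase
broadcast (turn-fan-on v)
end

if <(recognise text (answer) (label)) = [fan on]> then // new check: the model picks the closest label
broadcast (turn-fan-on v)
end
```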
\ No newline at end of file diff --git a/tr-TR/step_6.md b/tr-TR/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/tr-TR/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/tr-TR/step_7.md b/tr-TR/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/tr-TR/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/tr-TR/step_8.md b/tr-TR/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/tr-TR/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try our different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and trained the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/tr-TR/step_9.md b/tr-TR/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/tr-TR/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects. 
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/uk-UA/images/banner.png b/uk-UA/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/uk-UA/images/banner.png differ diff --git a/uk-UA/images/cheese-sandwich-annotated.png b/uk-UA/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/uk-UA/images/cheese-sandwich-annotated.png differ diff --git a/uk-UA/images/cheese-sandwich.png b/uk-UA/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/uk-UA/images/cheese-sandwich.png differ diff --git a/uk-UA/images/classroom-devices.png b/uk-UA/images/classroom-devices.png new file mode 100644 index 0000000..a25ed48 Binary files /dev/null and b/uk-UA/images/classroom-devices.png differ diff --git a/uk-UA/images/click-flag-annotated.png b/uk-UA/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/uk-UA/images/click-flag-annotated.png differ diff --git a/uk-UA/images/click-flag.png b/uk-UA/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/uk-UA/images/click-flag.png differ diff --git a/uk-UA/images/code-new-blocks.png b/uk-UA/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/uk-UA/images/code-new-blocks.png differ diff --git a/uk-UA/images/code-with-confidence.png b/uk-UA/images/code-with-confidence.png new file mode 100644 index 0000000..08fb60b Binary files /dev/null and b/uk-UA/images/code-with-confidence.png differ diff --git a/uk-UA/images/create-project-annotated.png b/uk-UA/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/uk-UA/images/create-project-annotated.png differ diff --git a/uk-UA/images/create-project.png b/uk-UA/images/create-project.png new file mode 100644 index 0000000..7ce2672 Binary files /dev/null and b/uk-UA/images/create-project.png differ diff --git a/uk-UA/images/empty-buckets.png b/uk-UA/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/uk-UA/images/empty-buckets.png differ diff --git a/uk-UA/images/fan-on-and-off.png b/uk-UA/images/fan-on-and-off.png new file mode 100644 index 0000000..76beb24 Binary files /dev/null and b/uk-UA/images/fan-on-and-off.png differ diff --git a/uk-UA/images/full-buckets.png b/uk-UA/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/uk-UA/images/full-buckets.png differ diff --git a/uk-UA/images/new-blocks-menu.png b/uk-UA/images/new-blocks-menu.png new file mode 100644 index 0000000..b1d716a Binary files /dev/null and b/uk-UA/images/new-blocks-menu.png differ diff --git a/uk-UA/images/open-scratch-3-annotated.png b/uk-UA/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/uk-UA/images/open-scratch-3-annotated.png differ diff --git a/uk-UA/images/open-scratch-3.png b/uk-UA/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/uk-UA/images/open-scratch-3.png differ diff --git a/uk-UA/images/play-music.png b/uk-UA/images/play-music.png new file mode 100644 index 0000000..852902d Binary files /dev/null and b/uk-UA/images/play-music.png differ diff --git 
a/uk-UA/images/project-make-annotated.png b/uk-UA/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/uk-UA/images/project-make-annotated.png differ diff --git a/uk-UA/images/project-make.png b/uk-UA/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/uk-UA/images/project-make.png differ diff --git a/uk-UA/images/project-templates-annotated.png b/uk-UA/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/uk-UA/images/project-templates-annotated.png differ diff --git a/uk-UA/images/project-templates.png b/uk-UA/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and b/uk-UA/images/project-templates.png differ diff --git a/uk-UA/images/project-train-annotated.png b/uk-UA/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/uk-UA/images/project-train-annotated.png differ diff --git a/uk-UA/images/project-train.png b/uk-UA/images/project-train.png new file mode 100644 index 0000000..3a3bdb9 Binary files /dev/null and b/uk-UA/images/project-train.png differ diff --git a/uk-UA/images/projects-list-annotated.png b/uk-UA/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/uk-UA/images/projects-list-annotated.png differ diff --git a/uk-UA/images/projects-list.png b/uk-UA/images/projects-list.png new file mode 100644 index 0000000..1721755 Binary files /dev/null and b/uk-UA/images/projects-list.png differ diff --git a/uk-UA/images/save-to-computer.png b/uk-UA/images/save-to-computer.png new file mode 100644 index 0000000..3d67093 Binary files /dev/null and b/uk-UA/images/save-to-computer.png differ diff --git a/uk-UA/images/scratch-template-annotated.png b/uk-UA/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/uk-UA/images/scratch-template-annotated.png differ diff --git a/uk-UA/images/scratch-template.png b/uk-UA/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/uk-UA/images/scratch-template.png differ diff --git a/uk-UA/images/smart-classroom.gif b/uk-UA/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/uk-UA/images/smart-classroom.gif differ diff --git a/uk-UA/images/smart-classroom.png b/uk-UA/images/smart-classroom.png new file mode 100644 index 0000000..e166aaa Binary files /dev/null and b/uk-UA/images/smart-classroom.png differ diff --git a/uk-UA/images/test-model.png b/uk-UA/images/test-model.png new file mode 100644 index 0000000..7dad5b5 Binary files /dev/null and b/uk-UA/images/test-model.png differ diff --git a/uk-UA/images/test-new-model-annotated.png b/uk-UA/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/uk-UA/images/test-new-model-annotated.png differ diff --git a/uk-UA/images/test-with-new-blocks-annotated.png b/uk-UA/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/uk-UA/images/test-with-new-blocks-annotated.png differ diff --git a/uk-UA/images/test-with-new-blocks.png b/uk-UA/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/uk-UA/images/test-with-new-blocks.png differ diff --git a/uk-UA/images/train-new-model-annotated.png 
b/uk-UA/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/uk-UA/images/train-new-model-annotated.png differ diff --git a/uk-UA/images/train-new-model.png b/uk-UA/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/uk-UA/images/train-new-model.png differ diff --git a/uk-UA/images/turn-fan-off.png b/uk-UA/images/turn-fan-off.png new file mode 100644 index 0000000..00280bb Binary files /dev/null and b/uk-UA/images/turn-fan-off.png differ diff --git a/uk-UA/images/turn-fan-on.png b/uk-UA/images/turn-fan-on.png new file mode 100644 index 0000000..86d49d4 Binary files /dev/null and b/uk-UA/images/turn-fan-on.png differ diff --git a/uk-UA/images/what-you-will-make.png b/uk-UA/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/uk-UA/images/what-you-will-make.png differ diff --git a/uk-UA/images/whatyouwillmake.gif b/uk-UA/images/whatyouwillmake.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/uk-UA/images/whatyouwillmake.gif differ diff --git a/uk-UA/meta.yml b/uk-UA/meta.yml new file mode 100644 index 0000000..04d5514 --- /dev/null +++ b/uk-UA/meta.yml @@ -0,0 +1,24 @@ +--- +title: Smart assistant +hero_image: images/banner.png +description: Create a virtual assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: '2019-05-09' +steps: +- title: What you will make +- title: Set up the project +- title: Example commands + completion: + - engaged +- title: Train the model +- title: Create the assistant +- title: 'Challenge' + challenge: true +- title: Confidence scores + completion: + - internal +- title: What can you do now? + completion: + - external diff --git a/uk-UA/resources/NEW smart assistant 1.mp4 b/uk-UA/resources/NEW smart assistant 1.mp4 new file mode 100644 index 0000000..51c457d Binary files /dev/null and b/uk-UA/resources/NEW smart assistant 1.mp4 differ diff --git a/uk-UA/resources/NEW smart assistant 1.srt b/uk-UA/resources/NEW smart assistant 1.srt new file mode 100644 index 0000000..25e9bad --- /dev/null +++ b/uk-UA/resources/NEW smart assistant 1.srt @@ -0,0 +1,26 @@ +1 +00:00:04,040 --> 00:00:09,880 +Go to rpf.io/ML4K and get started. + +2 +00:00:09,880 --> 00:00:12,640 +Click 'Try it now'. + +3 +00:00:12,640 --> 00:00:16,960 +Add a new project called 'Smart assistant'. + +4 +00:00:16,960 --> 00:00:23,240 +Set it to recognise English text, and  +store the data in your web browser. + +5 +00:00:23,240 --> 00:00:29,000 +Create the project, then  +click on the project's name. + +6 +00:00:29,000 --> 00:00:29,960 +Now, click 'Train'. + diff --git a/uk-UA/resources/NEW smart assistant 2.mp4 b/uk-UA/resources/NEW smart assistant 2.mp4 new file mode 100644 index 0000000..2e90362 Binary files /dev/null and b/uk-UA/resources/NEW smart assistant 2.mp4 differ diff --git a/uk-UA/resources/NEW smart assistant 2.srt b/uk-UA/resources/NEW smart assistant 2.srt new file mode 100644 index 0000000..aec96dd --- /dev/null +++ b/uk-UA/resources/NEW smart assistant 2.srt @@ -0,0 +1,27 @@ +1 +00:00:04,320 --> 00:00:06,080 +Add two new labels + +2 +00:00:06,080 --> 00:00:08,400 +one for fan on, + +3 +00:00:08,400 --> 00:00:12,760 +and one for fan off. + +4 +00:00:12,760 --> 00:00:18,200 +Type an example of a command  +that should turn the fan on. + +5 +00:00:18,200 --> 00:00:23,240 +Then repeat until you have  +eight different examples. 
+ +6 +00:00:23,240 --> 00:00:29,960 +Now you need eight different examples  +of commands to turn the fan off too. + diff --git a/uk-UA/resources/NEW smart assistant 3.mp4 b/uk-UA/resources/NEW smart assistant 3.mp4 new file mode 100644 index 0000000..2fdde5f Binary files /dev/null and b/uk-UA/resources/NEW smart assistant 3.mp4 differ diff --git a/uk-UA/resources/NEW smart assistant 3.srt b/uk-UA/resources/NEW smart assistant 3.srt new file mode 100644 index 0000000..9870fbf --- /dev/null +++ b/uk-UA/resources/NEW smart assistant 3.srt @@ -0,0 +1,19 @@ +1 +00:00:03,600 --> 00:00:10,200 +Click 'Back', then click 'Learn & Test'. + +2 +00:00:10,200 --> 00:00:14,720 +Click the button to start training  +your model - it might take a while. + +3 +00:00:14,720 --> 00:00:18,120 +When it is done, type in a  +command to test the model. + +4 +00:00:18,120 --> 00:00:29,960 +Check that your command gives  +the result you were expecting. + diff --git a/uk-UA/resources/NEW smart assistant 4.mp4 b/uk-UA/resources/NEW smart assistant 4.mp4 new file mode 100644 index 0000000..a295711 Binary files /dev/null and b/uk-UA/resources/NEW smart assistant 4.mp4 differ diff --git a/uk-UA/resources/NEW smart assistant 4.srt b/uk-UA/resources/NEW smart assistant 4.srt new file mode 100644 index 0000000..a68f19a --- /dev/null +++ b/uk-UA/resources/NEW smart assistant 4.srt @@ -0,0 +1,28 @@ +1 +00:00:03,800 --> 00:00:12,600 +Now click 'Back' again, then click  +'Make' to use the model in Scratch. + +2 +00:00:12,600 --> 00:00:20,160 +Go to 'Project templates' and select  +the 'Smart Classroom' template. + +3 +00:00:20,160 --> 00:00:29,960 +Now, drag the code blocks to ask for  +a command and turn the fan on or off. + +4 +00:00:38,160 --> 00:00:48,800 +You can right click and select  +'Duplicate' to save time. + +5 +00:00:48,800 --> 00:00:54,240 +Now type in a command to control the fan. + +6 +00:00:54,240 --> 00:00:59,920 +Type another command to turn it off. + diff --git a/uk-UA/resources/NEW smart assistant 5.mp4 b/uk-UA/resources/NEW smart assistant 5.mp4 new file mode 100644 index 0000000..945ac64 Binary files /dev/null and b/uk-UA/resources/NEW smart assistant 5.mp4 differ diff --git a/uk-UA/resources/NEW smart assistant 5.srt b/uk-UA/resources/NEW smart assistant 5.srt new file mode 100644 index 0000000..f1c1642 --- /dev/null +++ b/uk-UA/resources/NEW smart assistant 5.srt @@ -0,0 +1,33 @@ +1 +00:00:04,120 --> 00:00:07,480 +Go back to the 'Learn & Test' page. + +2 +00:00:07,480 --> 00:00:11,440 +Type something that is not  +related to lamps or fans. + +3 +00:00:11,440 --> 00:00:15,240 +The model should show a low  +confidence about its prediction. + +4 +00:00:15,240 --> 00:00:19,520 +Go back to Scratch, and move all of  +your 'if' statements to one side. + +5 +00:00:19,520 --> 00:00:26,880 +Add an 'if/else' so that your code can  +check the model's confidence level. + +6 +00:00:26,880 --> 00:00:36,640 +The light or fan should only be changed  +if the confidence level is over 70. + +7 +00:00:36,640 --> 00:00:46,640 +Try it out! + diff --git a/uk-UA/resources/readme.txt b/uk-UA/resources/readme.txt new file mode 100644 index 0000000..663519d --- /dev/null +++ b/uk-UA/resources/readme.txt @@ -0,0 +1 @@ +Щоб переглянути відео із субтитрами у програмі VLC (videolan.org), переконайся, що файл відео і файл субтитрів знаходяться в одній папці та мають однакові назви (наприклад, video.mp4 і video.srt). Відкрий відео у VLC, і програма автоматично завантажить субтитри. 
Якщо субтитри не з’являються, клацни правою кнопкою миші по відео, вибери **Субтитри**, потім **Додати файл субтитрів** і вибери відповідний файл .srt. Приємного перегляду з субтитрами! \ No newline at end of file diff --git a/uk-UA/step_1.md b/uk-UA/step_1.md new file mode 100644 index 0000000..28b53d0 --- /dev/null +++ b/uk-UA/step_1.md @@ -0,0 +1,33 @@ +## What you will make + +Create a smart virtual assistant that reacts to your commands. + +![A Scratch project with a fan and a light and a box to type in](images/whatyouwillmake.gif) + +\--- collapse --- +--- +title: Where are my commands stored? +--- + +- This project uses a technology called 'machine learning'. Machine learning systems are trained using a large amount of data. +- This project does not require you to create an account or log in. For this project, the examples you use to make the model are only stored temporarily in your browser (only on your machine). + +\--- /collapse --- + +\--- collapse --- +--- +title: No YouTube? Download the videos! +--- + +You can [download all the videos for this project](https://rpf.io/p/en/smart-assistant-go){:target="_blank"}. + +\--- /collapse --- + +\--- collapse --- +--- +title: License +--- + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. + +\--- /collapse --- \ No newline at end of file diff --git a/uk-UA/step_2.md b/uk-UA/step_2.md new file mode 100644 index 0000000..f50a1e4 --- /dev/null +++ b/uk-UA/step_2.md @@ -0,0 +1,38 @@ +## Set up the project + + +
+ +
+ + +\--- task --- + +- Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + +- Click on **Get started**. + +- Click on **Try it now**. + +\--- /task --- + +\--- task --- + +- Click on **Projects** in the menu bar at the top. + +- Click on the **+ Add a new project** button. + +- Name your project `Smart assistant` and set it to learn to recognise **text**, and store data **in your web browser**. Then click on **Create**. + ![Creating a project](images/create-project.png) + +- You should now see 'Smart assistant' in the projects list. Click on the project. + ![Project list with smart assistant listed](images/projects-list.png) + +\--- /task --- + +\--- task --- + +- Click on the **Train** button. + ![Project main menu with arrow pointing to Train button](images/project-train.png) + +\--- /task --- diff --git a/uk-UA/step_3.md b/uk-UA/step_3.md new file mode 100644 index 0000000..f0e0183 --- /dev/null +++ b/uk-UA/step_3.md @@ -0,0 +1,45 @@ +## Example commands + + +
+ +
+ + +The room has two devices: a fan and a light. + +![A fan and a light](images/classroom-devices.png) + +Your assistant needs some examples of the phrases you might say when you want each device to be turned on or off. For example, to **turn the fan on**, you might say: + +- "Turn on the fan" +- "Start the fan" +- "Please could you switch on the fan" +- "I'm too hot" +- "It's hot in here" + +\--- task --- + +- Click on **+ Add new label** on the top right and add the label “fan on”. + +\--- /task --- + +\--- task --- + +- Click on **Add example** and type in `Turn on the fan`. + +\--- /task --- + +\--- task --- + +- Continue to click on the **Add example** button and add different ways of asking for the fan to be turned on until you have eight different ways of asking. + +\--- /task --- + +\--- task --- + +- Click on **Add new label**, but this time create the label "fan off". Add eight different examples of ways you could ask for the fan to be turned off. + +\--- /task --- + +![Fan on and off categories with eight examples of commands in each](images/fan-on-and-off.png) diff --git a/uk-UA/step_4.md b/uk-UA/step_4.md new file mode 100644 index 0000000..ffc4547 --- /dev/null +++ b/uk-UA/step_4.md @@ -0,0 +1,40 @@ +## Train the model + + +
+ +
+ + +Now that you have some example data, you can train the machine learning model to label a command as either 'fan on' or 'fan off' based on your examples. + +\--- task --- + +- Click on the **< Back to project** link, then click on **Learn & Test**. + +\--- /task --- + +\--- task --- + +- Click on the **Train new machine learning model** button. + +\--- /task --- + +Wait for the training to complete — this might take a minute or two. Once the training has completed, a test box appears. + +\--- task --- + +- Type in `put the fan on` and check that the model labels this input as 'fan on'. + ![Type in put the fan on to see if it is recognised](images/test-model.png) + +\--- /task --- + +\--- task --- + +- Try typing in some other commands for turning the fan on and off, and check that they are given the label you expect. + +\--- /task --- + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + + diff --git a/uk-UA/step_5.md b/uk-UA/step_5.md new file mode 100644 index 0000000..35d90e3 --- /dev/null +++ b/uk-UA/step_5.md @@ -0,0 +1,59 @@ +## Create the assistant + + +
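By the end of this step, the 'Classroom' sprite will run a script like the rough sketch below; the tasks that follow build it up block by block. The sketch assumes the 'fan on' and 'fan off' labels you created earlier and the template's turn-fan-on and turn-fan-off broadcasts, so use whatever names appear in your own blocks.

```blocks3
when green flag clicked
forever
ask [Enter your command] and wait // wait for a typed command
if <(recognise text (answer) (label)) = [fan on]> then // ask the model which label fits best
broadcast (turn-fan-on v)
end
if <(recognise text (answer) (label)) = [fan off]> then
broadcast (turn-fan-off v)
end
end
```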
+ +
+ + +Now that your model can distinguish between commands, you can use it in a Scratch program to create your smart assistant. + +\--- task --- + +- Click on the **< Back to project** link. + +- Click on **Make**. + +- Click on **Scratch 3**. + +- Click on **Open in Scratch 3**. + +\--- /task --- + +\--- task --- + +- Click on **Project templates** at the top and select the 'Smart classroom' project to load the fan and light sprites. This project also contains pre-made yellow `broadcast` blocks, which can be found under **Events**. + +![Smart classroom project is selected in the Scratch templates](images/smart-classroom.png) + +\--- /task --- + +Machine Learning for Kids has added some special blocks to Scratch to allow you to use the model you just trained. Find them at the bottom of the blocks list. + +![New 'smart assistant' blocks shown in the menu underneath Images](images/new-blocks-menu.png) + +\--- task --- + +- Make sure you have the **Classroom** sprite selected, then click on the **Code** tab and add this code: + +![New scratch code: when flag clicked, forever, ask 'enter your command' and wait. If recognise text (answer) label = fan on, then broadcast turn-fan-on ](images/turn-fan-on.png) + +\--- /task --- + +\--- task --- + +- Right click on the `if` block and select **Duplicate** to add a copy of the whole block of code, and put it directly underneath the first `if`. + +- Change the second copy of the block so that it recognises the text for turning the fan **off**, and broadcasts **turn-fan-off**. + +![New scratch code: If recognise text (answer) label = fan off, then broadcast turn-fan-off](images/turn-fan-off.png) + +\--- /task --- + +\--- task --- + +- Click the **green flag** and type in a command to turn the fan on or off. Check that it has the result you expected. + +- Make sure you test that the assistant performs the correct action **even for commands that you didn’t include as examples**. + +\--- /task --- diff --git a/uk-UA/step_6.md b/uk-UA/step_6.md new file mode 100644 index 0000000..10a58e5 --- /dev/null +++ b/uk-UA/step_6.md @@ -0,0 +1,65 @@ +## Challenge + +\--- challenge --- + +Follow the same steps as before to allow the assistant to also control the light. + +\--- task --- + +- Save a copy of your Scratch project to your computer so that you can easily reload it later to use with your new model. + +![Click on File and then Save to computer](images/save-to-computer.png) + +\--- /task --- + +\--- task --- + +- Go back to your model (**Back to project** > **Train**) and add two more labels: `light_on` and `light_off`. + +\--- /task --- + +\--- task --- + +- Add eight examples of commands you might use to turn the light on. + +\--- /task --- + +\--- task --- + +- Add eight examples of commands you might use to turn the light off. + +\--- /task --- + +\--- task --- + +- Re-train your model (**Back to project** > **Learn and Test**) so that it can also recognise commands for turning the light on and off. + +\--- /task --- + +\--- task --- + +- Load your new model into Scratch (**Make** > **Scratch 3** > **Open in Scratch 3**). + +- In Scratch, reload the code you saved earlier (**File** > **Load from my computer**). + +- Add two more `if` blocks to your program so that you can type commands to control the light. + +## --- collapse --- + +## title: I can't see the blocks for light_on / light_off + +If you have trained a new model, you will need to close Scratch and then re-open it from the Machine Learning for Kids website for any new blocks to appear. 
+ +Click **Make** > **Scratch 3** > **Open in Scratch 3**. + +\--- /collapse --- + +\--- /task --- + +\--- task --- + +- Test whether your program works by typing in commands to turn the light on and off, and checking whether the outcome is as you expected. + +\--- /task --- + +\--- /challenge --- diff --git a/uk-UA/step_7.md b/uk-UA/step_7.md new file mode 100644 index 0000000..2fbec40 --- /dev/null +++ b/uk-UA/step_7.md @@ -0,0 +1,39 @@ +## Confidence scores + + +
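In this step you will add a check so that the assistant only acts when the model is reasonably sure about a command. As a rough sketch of where the script ends up (assuming the 70% threshold used in this step; the exact blocks are shown in the screenshots below):

```blocks3
when green flag clicked
forever
ask [Enter your command] and wait
if <(recognise text (answer) (confidence)) < [70]> then // the model is not sure about this command
say [Sorry, I didn't understand that] for (2) seconds
else
if <(recognise text (answer) (label)) = [fan on]> then
broadcast (turn-fan-on v)
end
if <(recognise text (answer) (label)) = [fan off]> then
broadcast (turn-fan-off v)
end
end
end
```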
+ +
+ + +The model can tell you how **confident** it is about whether it is correct. + +\--- task --- + +- Go back to the **Learn & Test** page in the training tool. + +- Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'play some music'. + +![Result of entering "play some music" is fan on with 36% confidence](images/play-music.png) + +\--- /task --- + +The **confidence score** is the program’s way of telling you how likely it is that it has labelled the command correctly. + +\--- task --- + +- Return to Scratch. + +- Add some new code so that the assistant will tell you it didn't understand the command if the confidence score is less than 70%. + +![New Scratch code: If recognise text (answer) confidence < 70, say 'Sorry I didn't understand that' for 2 seconds](images/code-with-confidence.png) + +\--- /task --- + +\--- task --- + +- Click the **green flag** and test your program to check that your assistant reacts in the right way: + - Type in commands that have nothing to do with the fan or lamp + - Ask for something to be turned on or off + +\--- /task --- diff --git a/uk-UA/step_8.md b/uk-UA/step_8.md new file mode 100644 index 0000000..d4b22e9 --- /dev/null +++ b/uk-UA/step_8.md @@ -0,0 +1,3 @@ +## What can you do now? + +There are lots of other machine learning and AI projects in the [Machine learning with Scratch](https://projects.raspberrypi.org/en/pathways/scratch-machine-learning) pathway. diff --git a/uk-UA/step_9.md b/uk-UA/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/uk-UA/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects. + +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/vi-VN/images/banner.png b/vi-VN/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/vi-VN/images/banner.png differ diff --git a/vi-VN/images/cheese-sandwich-annotated.png b/vi-VN/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/vi-VN/images/cheese-sandwich-annotated.png differ diff --git a/vi-VN/images/cheese-sandwich.png b/vi-VN/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/vi-VN/images/cheese-sandwich.png differ diff --git a/vi-VN/images/click-flag-annotated.png b/vi-VN/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/vi-VN/images/click-flag-annotated.png differ diff --git a/vi-VN/images/click-flag.png b/vi-VN/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/vi-VN/images/click-flag.png differ diff --git a/vi-VN/images/code-new-blocks.png b/vi-VN/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/vi-VN/images/code-new-blocks.png differ diff --git a/vi-VN/images/code-with-confidence.png b/vi-VN/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/vi-VN/images/code-with-confidence.png differ diff --git a/vi-VN/images/create-project-annotated.png b/vi-VN/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/vi-VN/images/create-project-annotated.png differ diff --git a/vi-VN/images/create-project.png 
b/vi-VN/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/vi-VN/images/create-project.png differ diff --git a/vi-VN/images/empty-buckets.png b/vi-VN/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/vi-VN/images/empty-buckets.png differ diff --git a/vi-VN/images/full-buckets.png b/vi-VN/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/vi-VN/images/full-buckets.png differ diff --git a/vi-VN/images/open-scratch-3-annotated.png b/vi-VN/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/vi-VN/images/open-scratch-3-annotated.png differ diff --git a/vi-VN/images/open-scratch-3.png b/vi-VN/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/vi-VN/images/open-scratch-3.png differ diff --git a/vi-VN/images/project-make-annotated.png b/vi-VN/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/vi-VN/images/project-make-annotated.png differ diff --git a/vi-VN/images/project-make.png b/vi-VN/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/vi-VN/images/project-make.png differ diff --git a/vi-VN/images/project-templates-annotated.png b/vi-VN/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/vi-VN/images/project-templates-annotated.png differ diff --git a/vi-VN/images/project-templates.png b/vi-VN/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and b/vi-VN/images/project-templates.png differ diff --git a/vi-VN/images/project-train-annotated.png b/vi-VN/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/vi-VN/images/project-train-annotated.png differ diff --git a/vi-VN/images/projects-list-annotated.png b/vi-VN/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/vi-VN/images/projects-list-annotated.png differ diff --git a/vi-VN/images/projects-list.png b/vi-VN/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/vi-VN/images/projects-list.png differ diff --git a/vi-VN/images/scratch-template-annotated.png b/vi-VN/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/vi-VN/images/scratch-template-annotated.png differ diff --git a/vi-VN/images/scratch-template.png b/vi-VN/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/vi-VN/images/scratch-template.png differ diff --git a/vi-VN/images/smart-classroom.gif b/vi-VN/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/vi-VN/images/smart-classroom.gif differ diff --git a/vi-VN/images/test-new-model-annotated.png b/vi-VN/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/vi-VN/images/test-new-model-annotated.png differ diff --git a/vi-VN/images/test-with-new-blocks-annotated.png b/vi-VN/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/vi-VN/images/test-with-new-blocks-annotated.png differ diff --git a/vi-VN/images/test-with-new-blocks.png b/vi-VN/images/test-with-new-blocks.png 
new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/vi-VN/images/test-with-new-blocks.png differ diff --git a/vi-VN/images/train-new-model-annotated.png b/vi-VN/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/vi-VN/images/train-new-model-annotated.png differ diff --git a/vi-VN/images/train-new-model.png b/vi-VN/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/vi-VN/images/train-new-model.png differ diff --git a/vi-VN/images/what-you-will-make.png b/vi-VN/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/vi-VN/images/what-you-will-make.png differ diff --git a/vi-VN/meta.yml b/vi-VN/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/vi-VN/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? diff --git a/vi-VN/step_1.md b/vi-VN/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/vi-VN/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. 
diff --git a/vi-VN/step_2.md b/vi-VN/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/vi-VN/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. + +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/vi-VN/step_3.md b/vi-VN/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/vi-VN/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. 
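Before you do, it is worth seeing how quickly the rules pile up. To cover just a few alternative phrasings of a single command, you would already need something like the sketch below (the phrasings are invented examples), and every device and every way of asking needs its own extra `if` block:

```blocks3
if <(answer) = [Turn on the fan]> then
broadcast (turn-fan-on v)
end
if <(answer) = [Switch the fan on]> then // a separate rule for every alternative phrasing
broadcast (turn-fan-on v)
end
if <(answer) = [Please turn the fan on]> then
broadcast (turn-fan-on v)
end
if <(answer) = [I'm too hot]> then
broadcast (turn-fan-on v)
end
```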
\ No newline at end of file diff --git a/vi-VN/step_4.md b/vi-VN/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/vi-VN/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. + +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/vi-VN/step_5.md b/vi-VN/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/vi-VN/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. 
+ +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. \ No newline at end of file diff --git a/vi-VN/step_6.md b/vi-VN/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/vi-VN/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. 
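To recap this step, here is a rough sketch of what the updated script looks like. It assumes labels named 'fan on', 'fan off', 'lamp on' and 'lamp off'; use whichever label names appear in your own `recognise text … (label)` block:

```blocks3
when green flag clicked
forever
ask [Enter your command] and wait
if <(recognise text (answer) (label)) = [fan on]> then // the model chooses the closest label
broadcast (turn-fan-on v)
end
if <(recognise text (answer) (label)) = [fan off]> then
broadcast (turn-fan-off v)
end
if <(recognise text (answer) (label)) = [lamp on]> then
broadcast (turn-lamp-on v)
end
if <(recognise text (answer) (label)) = [lamp off]> then
broadcast (turn-lamp-off v)
end
end
```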
\ No newline at end of file diff --git a/vi-VN/step_7.md b/vi-VN/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/vi-VN/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. + ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/vi-VN/step_8.md b/vi-VN/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/vi-VN/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try our different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. 
Then they collect examples of how the commands might be phrased and trained the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/vi-VN/step_9.md b/vi-VN/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/vi-VN/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects. + +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/zh-CN/images/banner.png b/zh-CN/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/zh-CN/images/banner.png differ diff --git a/zh-CN/images/cheese-sandwich-annotated.png b/zh-CN/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/zh-CN/images/cheese-sandwich-annotated.png differ diff --git a/zh-CN/images/cheese-sandwich.png b/zh-CN/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/zh-CN/images/cheese-sandwich.png differ diff --git a/zh-CN/images/click-flag-annotated.png b/zh-CN/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/zh-CN/images/click-flag-annotated.png differ diff --git a/zh-CN/images/click-flag.png b/zh-CN/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/zh-CN/images/click-flag.png differ diff --git a/zh-CN/images/code-new-blocks.png b/zh-CN/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/zh-CN/images/code-new-blocks.png differ diff --git a/zh-CN/images/code-with-confidence.png b/zh-CN/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/zh-CN/images/code-with-confidence.png differ diff --git a/zh-CN/images/create-project-annotated.png b/zh-CN/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/zh-CN/images/create-project-annotated.png differ diff --git a/zh-CN/images/create-project.png b/zh-CN/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/zh-CN/images/create-project.png differ diff --git a/zh-CN/images/empty-buckets.png b/zh-CN/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/zh-CN/images/empty-buckets.png differ diff --git a/zh-CN/images/full-buckets.png b/zh-CN/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/zh-CN/images/full-buckets.png differ diff --git a/zh-CN/images/open-scratch-3-annotated.png b/zh-CN/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/zh-CN/images/open-scratch-3-annotated.png differ diff --git a/zh-CN/images/open-scratch-3.png b/zh-CN/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/zh-CN/images/open-scratch-3.png differ diff --git a/zh-CN/images/project-make-annotated.png b/zh-CN/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/zh-CN/images/project-make-annotated.png differ diff --git 
a/zh-CN/images/project-make.png b/zh-CN/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/zh-CN/images/project-make.png differ diff --git a/zh-CN/images/project-templates-annotated.png b/zh-CN/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/zh-CN/images/project-templates-annotated.png differ diff --git a/zh-CN/images/project-templates.png b/zh-CN/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and b/zh-CN/images/project-templates.png differ diff --git a/zh-CN/images/project-train-annotated.png b/zh-CN/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/zh-CN/images/project-train-annotated.png differ diff --git a/zh-CN/images/projects-list-annotated.png b/zh-CN/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/zh-CN/images/projects-list-annotated.png differ diff --git a/zh-CN/images/projects-list.png b/zh-CN/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/zh-CN/images/projects-list.png differ diff --git a/zh-CN/images/scratch-template-annotated.png b/zh-CN/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/zh-CN/images/scratch-template-annotated.png differ diff --git a/zh-CN/images/scratch-template.png b/zh-CN/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/zh-CN/images/scratch-template.png differ diff --git a/zh-CN/images/smart-classroom.gif b/zh-CN/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/zh-CN/images/smart-classroom.gif differ diff --git a/zh-CN/images/test-new-model-annotated.png b/zh-CN/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/zh-CN/images/test-new-model-annotated.png differ diff --git a/zh-CN/images/test-with-new-blocks-annotated.png b/zh-CN/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/zh-CN/images/test-with-new-blocks-annotated.png differ diff --git a/zh-CN/images/test-with-new-blocks.png b/zh-CN/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/zh-CN/images/test-with-new-blocks.png differ diff --git a/zh-CN/images/train-new-model-annotated.png b/zh-CN/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/zh-CN/images/train-new-model-annotated.png differ diff --git a/zh-CN/images/train-new-model.png b/zh-CN/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/zh-CN/images/train-new-model.png differ diff --git a/zh-CN/images/what-you-will-make.png b/zh-CN/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/zh-CN/images/what-you-will-make.png differ diff --git a/zh-CN/meta.yml b/zh-CN/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/zh-CN/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a 
project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? diff --git a/zh-CN/step_1.md b/zh-CN/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/zh-CN/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/zh-CN/step_2.md b/zh-CN/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/zh-CN/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. 
![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. + +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/zh-CN/step_3.md b/zh-CN/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/zh-CN/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/zh-CN/step_4.md b/zh-CN/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/zh-CN/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. 
+ ++ Do the same for the “lamp on” and “lamp off” buckets. + +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/zh-CN/step_5.md b/zh-CN/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/zh-CN/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
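For comparison with the supervised learning approach described above, the rules from the earlier 'Add a list of rules' step boil down to exact string matching. The short Python sketch below (a hypothetical stand-in, not part of the Scratch project) mirrors the `if <(answer) = [...]>` blocks and shows why that approach is so brittle: any phrasing that is not word-for-word identical to a rule is ignored.

```python
# A minimal sketch of the rules-based approach: exact matches only.
# (Hypothetical illustration - the real project does this with Scratch 'if' blocks.)
RULES = {
    "Turn on the fan":   "turn-fan-on",
    "Turn off the fan":  "turn-fan-off",
    "Turn on the lamp":  "turn-lamp-on",
    "Turn off the lamp": "turn-lamp-off",
}

def react(command: str) -> str:
    # Anything not listed word-for-word is ignored, even a small spelling mistake.
    return RULES.get(command, "do nothing")

print(react("Turn on the fan"))         # turn-fan-on
print(react("Please turn the fan on"))  # do nothing - not an exact match
print(react("Turn on teh fan"))         # do nothing - one typo breaks the rule
```

Covering every possible phrasing this way would need an enormous dictionary, which is exactly the problem that training on examples solves.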
\ No newline at end of file diff --git a/zh-CN/step_6.md b/zh-CN/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/zh-CN/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/zh-CN/step_7.md b/zh-CN/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/zh-CN/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/zh-CN/step_8.md b/zh-CN/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/zh-CN/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try our different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and trained the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/zh-CN/step_9.md b/zh-CN/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/zh-CN/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects. 
+ +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file diff --git a/zh-TW/images/banner.png b/zh-TW/images/banner.png new file mode 100644 index 0000000..10aa4ad Binary files /dev/null and b/zh-TW/images/banner.png differ diff --git a/zh-TW/images/cheese-sandwich-annotated.png b/zh-TW/images/cheese-sandwich-annotated.png new file mode 100644 index 0000000..1a7d932 Binary files /dev/null and b/zh-TW/images/cheese-sandwich-annotated.png differ diff --git a/zh-TW/images/cheese-sandwich.png b/zh-TW/images/cheese-sandwich.png new file mode 100644 index 0000000..16ecc1b Binary files /dev/null and b/zh-TW/images/cheese-sandwich.png differ diff --git a/zh-TW/images/click-flag-annotated.png b/zh-TW/images/click-flag-annotated.png new file mode 100644 index 0000000..635f0f2 Binary files /dev/null and b/zh-TW/images/click-flag-annotated.png differ diff --git a/zh-TW/images/click-flag.png b/zh-TW/images/click-flag.png new file mode 100644 index 0000000..e4bdd64 Binary files /dev/null and b/zh-TW/images/click-flag.png differ diff --git a/zh-TW/images/code-new-blocks.png b/zh-TW/images/code-new-blocks.png new file mode 100644 index 0000000..171119a Binary files /dev/null and b/zh-TW/images/code-new-blocks.png differ diff --git a/zh-TW/images/code-with-confidence.png b/zh-TW/images/code-with-confidence.png new file mode 100644 index 0000000..929bbef Binary files /dev/null and b/zh-TW/images/code-with-confidence.png differ diff --git a/zh-TW/images/create-project-annotated.png b/zh-TW/images/create-project-annotated.png new file mode 100644 index 0000000..86a2677 Binary files /dev/null and b/zh-TW/images/create-project-annotated.png differ diff --git a/zh-TW/images/create-project.png b/zh-TW/images/create-project.png new file mode 100644 index 0000000..cd316fb Binary files /dev/null and b/zh-TW/images/create-project.png differ diff --git a/zh-TW/images/empty-buckets.png b/zh-TW/images/empty-buckets.png new file mode 100644 index 0000000..ec8b701 Binary files /dev/null and b/zh-TW/images/empty-buckets.png differ diff --git a/zh-TW/images/full-buckets.png b/zh-TW/images/full-buckets.png new file mode 100644 index 0000000..e54d414 Binary files /dev/null and b/zh-TW/images/full-buckets.png differ diff --git a/zh-TW/images/open-scratch-3-annotated.png b/zh-TW/images/open-scratch-3-annotated.png new file mode 100644 index 0000000..ea73a2f Binary files /dev/null and b/zh-TW/images/open-scratch-3-annotated.png differ diff --git a/zh-TW/images/open-scratch-3.png b/zh-TW/images/open-scratch-3.png new file mode 100644 index 0000000..8c1dd22 Binary files /dev/null and b/zh-TW/images/open-scratch-3.png differ diff --git a/zh-TW/images/project-make-annotated.png b/zh-TW/images/project-make-annotated.png new file mode 100644 index 0000000..ab94bb0 Binary files /dev/null and b/zh-TW/images/project-make-annotated.png differ diff --git a/zh-TW/images/project-make.png b/zh-TW/images/project-make.png new file mode 100644 index 0000000..8c608d5 Binary files /dev/null and b/zh-TW/images/project-make.png differ diff --git a/zh-TW/images/project-templates-annotated.png b/zh-TW/images/project-templates-annotated.png new file mode 100644 index 0000000..2ca0e8a Binary files /dev/null and b/zh-TW/images/project-templates-annotated.png differ diff --git a/zh-TW/images/project-templates.png b/zh-TW/images/project-templates.png new file mode 100644 index 0000000..9ff67e1 Binary files /dev/null and 
b/zh-TW/images/project-templates.png differ diff --git a/zh-TW/images/project-train-annotated.png b/zh-TW/images/project-train-annotated.png new file mode 100644 index 0000000..4a8c8c4 Binary files /dev/null and b/zh-TW/images/project-train-annotated.png differ diff --git a/zh-TW/images/projects-list-annotated.png b/zh-TW/images/projects-list-annotated.png new file mode 100644 index 0000000..2c52a50 Binary files /dev/null and b/zh-TW/images/projects-list-annotated.png differ diff --git a/zh-TW/images/projects-list.png b/zh-TW/images/projects-list.png new file mode 100644 index 0000000..48f51e7 Binary files /dev/null and b/zh-TW/images/projects-list.png differ diff --git a/zh-TW/images/scratch-template-annotated.png b/zh-TW/images/scratch-template-annotated.png new file mode 100644 index 0000000..bb56508 Binary files /dev/null and b/zh-TW/images/scratch-template-annotated.png differ diff --git a/zh-TW/images/scratch-template.png b/zh-TW/images/scratch-template.png new file mode 100644 index 0000000..c02ffcc Binary files /dev/null and b/zh-TW/images/scratch-template.png differ diff --git a/zh-TW/images/smart-classroom.gif b/zh-TW/images/smart-classroom.gif new file mode 100644 index 0000000..eab8141 Binary files /dev/null and b/zh-TW/images/smart-classroom.gif differ diff --git a/zh-TW/images/test-new-model-annotated.png b/zh-TW/images/test-new-model-annotated.png new file mode 100644 index 0000000..cab28af Binary files /dev/null and b/zh-TW/images/test-new-model-annotated.png differ diff --git a/zh-TW/images/test-with-new-blocks-annotated.png b/zh-TW/images/test-with-new-blocks-annotated.png new file mode 100644 index 0000000..d704146 Binary files /dev/null and b/zh-TW/images/test-with-new-blocks-annotated.png differ diff --git a/zh-TW/images/test-with-new-blocks.png b/zh-TW/images/test-with-new-blocks.png new file mode 100644 index 0000000..4073558 Binary files /dev/null and b/zh-TW/images/test-with-new-blocks.png differ diff --git a/zh-TW/images/train-new-model-annotated.png b/zh-TW/images/train-new-model-annotated.png new file mode 100644 index 0000000..ac4ed49 Binary files /dev/null and b/zh-TW/images/train-new-model-annotated.png differ diff --git a/zh-TW/images/train-new-model.png b/zh-TW/images/train-new-model.png new file mode 100644 index 0000000..5768635 Binary files /dev/null and b/zh-TW/images/train-new-model.png differ diff --git a/zh-TW/images/what-you-will-make.png b/zh-TW/images/what-you-will-make.png new file mode 100644 index 0000000..ae7ac98 Binary files /dev/null and b/zh-TW/images/what-you-will-make.png differ diff --git a/zh-TW/meta.yml b/zh-TW/meta.yml new file mode 100644 index 0000000..d0c37bc --- /dev/null +++ b/zh-TW/meta.yml @@ -0,0 +1,28 @@ +--- +title: Smart classroom assistant +hero_image: images/banner.png +description: Create a virtual classroom assistant that reacts to commands +version: 4 +listed: true +copyedit: true +last_tested: "2019-05-09" +steps: + - + title: Introduction + - + title: How to create a project + - + title: Add a list of rules + - + title: Collect examples for training + - + title: Train and test a machine learning model + - + title: Use the machine learning model in Scratch + - + title: How to use confidence scores + - + title: "Challenge: more items to control" + challenge: true + - + title: What next? 
diff --git a/zh-TW/step_1.md b/zh-TW/step_1.md new file mode 100644 index 0000000..b60b2b2 --- /dev/null +++ b/zh-TW/step_1.md @@ -0,0 +1,56 @@ +## Introduction + +In this project you will use [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk){:target="_blank"} to make a smart virtual classroom assistant that reacts to what you say to it. You’ll be able to control the virtual devices in the classroom by typing in commands! + +First, you’ll create an assistant that uses a list of rules for understanding commands, and you'll learn why that approach isn’t very good. + +Next, you will teach the assistant to recognise commands for different devices by training it using examples of each command. + +### What you will make + +\--- print-only \--- + +![Complete project](images/what-you-will-make.png) + +\--- /print-only \--- + +\--- no-print \--- + +![Complete project GIF](images/smart-classroom.gif) + +\--- /no-print \--- + +\--- collapse \--- +--- +title: What you will learn +--- + ++ How to train and test a machine learning model ++ Why this approach is better than using a long list of rules ++ How confidence scores can improve the user experience + +\--- /collapse \--- + +\--- collapse \--- +--- +title: What you will need +--- + ++ A computer connected to the internet + +\--- /collapse \--- + +\--- collapse \--- +--- +title: Additional information for educators +--- + +If you need to print this project, please use the [printer-friendly version](https://projects.raspberrypi.org/en/projects/smart-classroom/print){:target="_blank"}. + +\--- /collapse \--- + +### Licence + +This project is dual-licensed under both a [Creative Commons Attribution Non-Commercial Share-Alike License](http://creativecommons.org/licenses/by-nc-sa/4.0/){:target="_blank"} and an [Apache License Version 2.0](http://www.apache.org/licenses/LICENSE-2.0){:target="_blank"}. + +We'd like to thank Dale from machinelearningforkids.co.uk for all his work on this project. diff --git a/zh-TW/step_2.md b/zh-TW/step_2.md new file mode 100644 index 0000000..8e6b78d --- /dev/null +++ b/zh-TW/step_2.md @@ -0,0 +1,33 @@ +## How to create a project + +\--- task \--- ++ Go to [machinelearningforkids.co.uk](https://machinelearningforkids.co.uk/){:target="_blank"} in a web browser. + ++ Click on **Get Started** + ++ Click on **Try it now**. \--- /task \--- + +\--- task \--- ++ Click on **Projects** in the menu bar at the top. + ++ Click on the **+ Add a new project** button. + ++ Name your project 'smart classroom' and set it to learn to recognise **text**. Then click on **Create**. ![Creating a project](images/create-project-annotated.png) + ++ You should now see 'smart classroom' in the projects list. Click on this project. ![Project list with smart classroom listed](images/projects-list-annotated.png) \--- /task \--- + +\--- task \--- + +Now get a project ready in Scratch. + ++ Click on **Make**. ![Project main menu](images/project-make-annotated.png) + ++ Click on **Scratch 3**. + ++ Click on **Scratch by itself**. The page then warns you that you haven’t done any machine learning yet. Click on **Scratch by itself** to launch Scratch. + ++ Click on **Project templates**. ![Scratch menu bar](images/project-templates-annotated.png) + ++ Click on the **Smart Classroom** template. 
+ +![Scratch template project](images/scratch-template.png) \--- /task \--- diff --git a/zh-TW/step_3.md b/zh-TW/step_3.md new file mode 100644 index 0000000..67c7edc --- /dev/null +++ b/zh-TW/step_3.md @@ -0,0 +1,53 @@ +## Add a list of rules + +In this step, you will edit the project to include a list of rules to activate and de-activate the fan and the lamp. + +\--- task \--- ++ Click the **classroom** sprite to select it, as shown below: + +![Scratch template project](images/scratch-template-annotated.png) + ++ Click on the **Scripts** tab and create the following script: + +```blocks3 +when green flag clicked +forever +ask [Enter your command] and wait +if <(answer) = [Turn on the fan]> then +broadcast (turn-fan-on v) +end +if <(answer) = [Turn off the fan]> then +broadcast (turn-fan-off v) +end +if <(answer) = [Turn on the lamp]> then +broadcast (turn-lamp-on v) +end +if <(answer) = [Turn off the lamp]> then +broadcast (turn-lamp-off v) +end +end +``` + ++ Click on **File** and then on **Save to your computer**, and save the program to a file. \--- /task \--- + +\--- task \--- + ++ Click on the **green flag** to test your program. + +![Scratch interface just after green flag is clicked](images/click-flag-annotated.png) + ++ Type in a command and watch the program react! The following commands should all work: + * “Turn on the lamp” + * “Turn off the lamp” + * “Turn on the fan” + * “Turn off the fan” + +* Type in anything else, and your program does nothing! Even if you make a small spelling mistake, the program does not react. + +\--- /task \--- + +You’re telling your virtual classroom assistant to react to commands using a simple rules-based approach. But if you wanted your program to understand commands that are phrased differently, you would need to add extra `if` blocks. + +The problem with this rules-based approach is that you need to exactly predict all the commands the smart classroom assistant will get. Listing every possible command would take a very, very long time. + +Next, you will try a better approach: teaching the computer to recognise commands by itself. \ No newline at end of file diff --git a/zh-TW/step_4.md b/zh-TW/step_4.md new file mode 100644 index 0000000..af80f35 --- /dev/null +++ b/zh-TW/step_4.md @@ -0,0 +1,47 @@ +## Collect examples for training + +\--- task \--- ++ Close the Scratch window and go back to the Training tool. + ++ Click on the **< Back to project** link. \--- /task \--- + +\--- task \--- ++ Click on the **Train** button. ![Project main menu](images/project-train-annotated.png) + +You need to collect some examples to train the computer. To collect different examples, you need to create 'buckets' to put the examples in. + ++ To create a bucket, click on **+ Add new label** and call the bucket “fan on”. Click on **+ Add new label** again and create a second bucket called “fan off”. Create a third and a fourth bucket called “lamp on” and "lamp off". ![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/empty-buckets.png) + ++ Click on the **Add example** button in the “fan on” bucket, and type in a command asking for the fan to be turned on. For example, you could type “Please can you switch on the fan”. + ++ Click on the **Add example** button in the “fan off” bucket, and type in a command asking for the fan to be switched off. For example, you could type “I want the fan off now”. + ++ Do the same for the “lamp on” and “lamp off” buckets. 
+ +\--- /task \--- + +\--- task \--- ++ Continue to **Add example**s until you have at least **six** examples in **each** bucket. + +Be imaginative! Try and think of lots of different ways to ask each command. For example: + ++ For “fan on”, you could complain that you’re too hot. ++ For “fan off”, you could complain that it’s too breezy. ++ For “lamp on”, you could complain that you can’t see. ++ For “lamp off”, you could complain that it’s too bright. + +![4 empty classes named fan_on, fan_off, lamp_on and lamp_off](images/full-buckets.png) + +\--- collapse \--- +--- +title: Tips for selecting good examples +--- ++ **More is good**: the more examples you give your program, the better the program should get at recognising your commands. + ++ **Equal numbers**: add roughly the same number of examples for each command. If you have a lot of examples for one command and not the others, this can affect the way that the program learns to recognise commands. + ++ **Make the examples really different from each other**: try to come up with lots of different types of examples. For example, make sure that you include some long examples and some very short ones. + +\--- /collapse \--- \--- /task \--- + +In the next step you will train your program to recognise any new command automatically by comparing it to the examples in the four buckets. diff --git a/zh-TW/step_5.md b/zh-TW/step_5.md new file mode 100644 index 0000000..7678ea3 --- /dev/null +++ b/zh-TW/step_5.md @@ -0,0 +1,28 @@ +## Train and test a machine learning model + +You will now train the program using the examples, and then test it. + +The program will learn from patterns in the examples you give it, such as the choice of words and the way sentences are structured. Then, based on the patterns the program finds, it can interpret new commands. + +\--- task \--- ++ Click on the **< Back to project** link, then click on **Learn & Test**. + ++ Click on the **Train new machine learning model** button. If you have enough examples, the program should start to learn how to recognise commands from these examples. + +![Annotation pointing to train new machine learning model button](images/train-new-model-annotated.png) \--- /task \--- + +Wait for the training to complete. This might take a minute or two. While you wait, complete the machine-learning multi-choice quiz at the bottom of the page. + +\--- task \--- Once the training has completed, a test box appears. Test your machine learning model to see what it has learned. + ++ Type in one of the commands you added to a bucket, and then press Enter. The command should be recognised. + ++ Type in **commands that are not in the buckets**. + +If you’re not happy with how the computer recognises the commands, go back to the previous step and add some more examples. Then **train new machine learning model** again. + +![Annotation pointing to train new machine learning model button](images/test-new-model-annotated.png) \--- /task \--- + +Instead of writing rules for the program, you are giving the program examples. The program uses the examples to train a machine learning **model**. + +Because you are supervising the program's training by giving examples, this machine learning approach is called **supervised learning**. 
\ No newline at end of file diff --git a/zh-TW/step_6.md b/zh-TW/step_6.md new file mode 100644 index 0000000..dbc1756 --- /dev/null +++ b/zh-TW/step_6.md @@ -0,0 +1,47 @@ +## Use the machine learning model in Scratch + +Now update your Scratch program to include your machine learning model instead of a rules-based approach. + +\--- task \--- ++ Click on the **< Back to project** link. + ++ Click on **Make**. + ++ Click on **Scratch 3**. + ++ Read the instructions on the page to learn how to use machine learning blocks in Scratch. + ++ Click on **Open in Scratch 3**. + +![annotation pointing at Open in scratch 3 button](images/open-scratch-3-annotated.png) + ++ Click on **File** and then on **Load from your computer**, and select the Scratch project you saved earlier. + ++ When Scratch asks you whether to replace the current project, click on **OK**. + +\--- /task \--- + +\--- task \--- + ++ Click on the **Code** tab, and update your Scratch code to use your machine learning model **instead** of the rules you first added. + +The `recognise text … (label)` block is a new block added by your project. This new block can receive a message and return one of the four labels, based on the machine learning model you have trained. + +![New scratch code including new machine learning blocks](images/code-new-blocks.png) \--- /task \--- + +\--- task \--- ++ Click the **green flag** to test again. + +![Testing new code from previous instruction](images/test-with-new-blocks-annotated) + ++ Test your project: type a command and press Enter on your keyboard. The fan or lamp should react to your command. + +Make sure you test that this works **even for commands that you didn’t include as examples in the buckets.** + ++ Save your project: click on **File** and then on **Save to your computer**. \--- /task \--- + +Now your Scratch smart virtual classroom uses a machine learning model instead of a rules-based approach. + +Using machine learning is better than using rules, because training a program to recognise commands for itself is much quicker than trying to make a list of every possible command. + +The more examples you use to train the machine learning model, the better your program should get at recognising commands. \ No newline at end of file diff --git a/zh-TW/step_7.md b/zh-TW/step_7.md new file mode 100644 index 0000000..4e6d8c9 --- /dev/null +++ b/zh-TW/step_7.md @@ -0,0 +1,36 @@ +## How to use confidence scores + +Finally, you will learn about what confidence scores mean and how you should use them. + +\--- task \--- ++ Leave Scratch open, because you will come back in a moment. + ++ Go back to the **Learn & Test** page in the Training tool. + ++ Type something that has nothing to do with lamps or fans into the test box. For example, you could type in 'make me a cheese sandwich'. ![Result of entering "make me a cheese sandwich" is lamp off with 21% confidence](images/cheese-sandwich-annotated.png) + ++ Look at the confidence score, which should be very low. + ++ Compare this with the confidence score you get for a command such as “turn on the lamp”. + +**The confidence score is the program’s way of telling you how certain it is that it understands a command.** If a command is very similar to the examples you have trained the program with, the confidence score is high. If a command is **not** similar, the confidence score is low. + +\--- /task \--- + +\--- task \--- + ++ Go back to your classroom assistant project in Scratch. 
+ ++ Modify the script for the 'classroom' sprite so that it uses the confidence score: + +![New code to be added into scratch program](images/code-with-confidence.png) + ++ Click the green flag and test your program to check that your classroom assistant reacts in the right way: + + Type in commands that have nothing to do with the fan or lamp + + Ask for something to be turned on or off + +Now, if your program is not sure what you mean, it tells you so. Then you can try giving it another command. \--- /task \--- + +You’ve used machine learning to train a smart assistant that is a simple version of the assistants you can get on smartphones (e.g. Apple’s Siri or Google’s Assistant) or at home (e.g. Amazon’s Alexa or Google’s Home). + +Training the program to recognise commands is much easier than trying to make a list of every possible command. And the more examples you give the program, the better it gets at recognising commands, and the more its confidence scores increase. \ No newline at end of file diff --git a/zh-TW/step_8.md b/zh-TW/step_8.md new file mode 100644 index 0000000..5ae3bff --- /dev/null +++ b/zh-TW/step_8.md @@ -0,0 +1,35 @@ +## Challenge: more items to control + +\--- challenge \--- \--- task \--- + +**Add another item** + ++ In addition to a fan and a lamp, can you add another item and train your smart classroom assistant to understand your commands for controlling the item? + +\--- /task \--- + +\--- task \--- + +**Try out different confidence scores** + ++ Is 70% the correct confidence score for deciding whether the smart classroom assistant has recognised a command correctly? Experiment with different confidence scores until you have a value that works well for your machine learning model. + +If you choose a number that is too high, the assistant will say “Sorry I’m not sure what you mean” too often. + +If you choose a number that is too low, the assistant will get too many things wrong. \--- /task \--- + +\--- task \--- + +**Real smart assistants** + +People have made [their own smart assistants based on Amazon’s Alexa](http://amzn.to/2sxy1hw){:target="_blank"}. + +People make these assistants the same way that you made yours: +1. First, they create buckets for the types of commands they want their assistants to recognise +1. Then they collect examples of how the commands might be phrased and train the Alexa-based assistant to understand them + ++ Find an Alexa Skill that you find interesting and look at the commands it can understand. How would you have trained this program? + +\--- /task \--- + +\--- /challenge \--- diff --git a/zh-TW/step_9.md b/zh-TW/step_9.md new file mode 100644 index 0000000..ebc1775 --- /dev/null +++ b/zh-TW/step_9.md @@ -0,0 +1,7 @@ +## What next? + +If you haven't already, try our other machine learning with Scratch projects. + +[Journey to school](https://projects.raspberrypi.org/en/projects/journey-to-school) + +[Alien language](https://projects.raspberrypi.org/en/projects/alien-language) \ No newline at end of file
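As a closing illustration of the confidence check used in the 'How to use confidence scores' step, here is a small Python sketch of the threshold logic. The label and confidence stand in for the values your machine learning blocks report in Scratch; the 70% starting threshold and the 'Sorry' reply come from the project, while the function itself is a hypothetical stand-in rather than the actual Scratch code.

```python
# A sketch of the confidence check: only act on a command when the model is
# confident enough, otherwise ask the user to rephrase.
# (Hypothetical illustration - in the project this logic lives in Scratch blocks.)
CONFIDENCE_THRESHOLD = 70  # percent; the challenge step suggests experimenting with this value

ACTIONS = {
    "fan on":   "broadcast turn-fan-on",
    "fan off":  "broadcast turn-fan-off",
    "lamp on":  "broadcast turn-lamp-on",
    "lamp off": "broadcast turn-lamp-off",
}

def react(label: str, confidence: float) -> str:
    """Decide what the classroom assistant should do with a recognised command."""
    if confidence < CONFIDENCE_THRESHOLD:
        # Not similar enough to any training example: say so instead of guessing.
        return "Sorry, I'm not sure what you mean"
    return ACTIONS.get(label, "Sorry, I'm not sure what you mean")

print(react("lamp off", 21))  # low confidence (e.g. 'make me a cheese sandwich') -> apologise
print(react("lamp on", 93))   # high confidence -> broadcast turn-lamp-on
```

Raising the threshold makes the assistant apologise more often; lowering it makes it guess more often and get more commands wrong, which is the trade-off the challenge step asks you to explore.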