diff --git a/app/__init__.py b/app/__init__.py
index 1839f87..c2c768e 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -5,7 +5,7 @@ import re
 from glob import glob
 from typing import Optional
 
-from flask import Flask, abort, send_from_directory, render_template
+from flask import Flask, Response, abort, send_from_directory, render_template
 from markdown import markdown
 
 basedir = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..'))
@@ -63,13 +63,14 @@ def get_page(page: str, title: Optional[str] = None):
                            description=metadata.get('description'),
                            published=(metadata['published'].strftime('%b %d, %Y')
                                       if metadata.get('published') else None),
-                           content=markdown(f.read(),extensions=['fenced_code', 'codehilite']))
+                           content=markdown(f.read(), extensions=['fenced_code', 'codehilite']))
 
 
-def get_pages() -> list:
+def get_pages(with_content: bool = False) -> list:
     return sorted([
         {
             'path': path,
+            'content': get_page(path) if with_content else '',
             **get_page_metadata(os.path.basename(path)),
         }
         for path in glob(os.path.join(pages_dir, '*.md'))
@@ -99,3 +100,50 @@ def css_route(style: str):
 @app.route('/article/<article>', methods=['GET'])
 def article_route(article: str):
     return get_page(article)
+
+
+@app.route('/rss', methods=['GET'])
+def rss_route():
+    pages = get_pages(with_content=True)
+
+    return Response('''<?xml version="1.0" encoding="UTF-8" ?>
+<rss version="2.0">
+    <channel>
+        <title>Platypush blog feeds</title>
+        <link>http://blog.platypush.tech</link>
+        <description>Insights and inspirational projects using Platypush as an automation platform</description>
+        <category>Programming, automation, Python, machine learning, IoT, smart home</category>
+        <image>
+            <url>https://git.platypush.tech/uploads/-/system/appearance/header_logo/1/icon-256.png</url>
+            <title>Platypush</title>
+            <link>https://git.platypush.tech</link>
+        </image>
+
+        <lastBuildDate>{last_pub_date}</lastBuildDate>
+        <language>en-us</language>
+
+        {items}
+    </channel>
+</rss>'''.format(
+        last_pub_date=pages[0]['published'].strftime('%a, %d %b %Y %H:%M:%S GMT'),
+        items='\n\n'.join([
+            '''
+            <item>
+                <title>{title}</title>
+                <link>https://blog.platypush.tech{link}</link>
+                <pubDate>{published}</pubDate>
+                <description>
+                    <![CDATA[{content}]]>
+                </description>
+                <image>
+                    <url>https://blog.platypush.tech/img{image}</url>
+                </image>
+            </item>
+            '''.format(
+                title=page.get('title', '[No Title]'),
+                link=page.get('uri', ''),
+                published=page['published'].strftime('%a, %d %b %Y %H:%M:%S GMT') if 'published' in page else '',
+                # description=page.get('description', ''),
+                content=page.get('content', ''),
+                image=page.get('image', ''),
+            )
+            for page in pages
+        ]),
+    ), mimetype='application/rss+xml')
diff --git a/static/css/blog.css b/static/css/blog.css
index 90e5a6a..5726700 100644
--- a/static/css/blog.css
+++ b/static/css/blog.css
@@ -1,9 +1,15 @@
 main .content {
     display: flex;
     flex-direction: column;
-    text-align: justify;
     line-height: 1.5em;
-    letter-spacing: .04em;
+}
+
+main .content p,
+main .content ul {
+    font-family: Avenir, Palatino, charter, Georgia, Cambria, "Times New Roman", Times, serif;
+    text-align: justify;
+    overflow-wrap: break-word;
+    word-break: break-word;
 }
 
 main .content code, .codehilite {
diff --git a/static/css/common.css b/static/css/common.css
index 83dd2f5..ac1618a 100644
--- a/static/css/common.css
+++ b/static/css/common.css
@@ -1,74 +1,97 @@
 html {
-  font-size: calc(1em + 1vw);
-}
-
-@media screen and (min-width: 1024px) {
-  html {
-    font-size: 20px;
-  }
+    height: 100%;
+    font-size: 20px;
+    font-family: BlinkMacSystemFont, -apple-system, Segoe UI, Roboto, Oxygen, Ubuntu, Cantarell, Fira Sans, Droid Sans, Helvetica Neue, Helvetica, Arial, sans-serif;
+    font-weight: 400;
+    text-rendering: optimizeLegibility;
 }
 
 body {
-  margin: 0;
-  width: 100%;
-  height: 100%;
+    margin: 0;
+    width: 100%;
+    height: 100%;
 }
 
 a, a:visited {
-  color: #555;
-  border-bottom: 1px dashed #999;
-  text-decoration: none;
+    color: #555;
+    border-bottom: 1px dashed #999;
+    text-decoration: none;
 }
 
 header {
-  display: flex;
-  align-items: center;
-  height: 3em;
-  padding: 0 .5em;
-  box-shadow: 1px 3px 3px 0 #bbb;
+    display: flex;
+    align-items: center;
+    height: 8%;
+    padding: 0 .5em;
+    box-shadow: 1px 3px 3px 0 #bbb;
 }
 
-@media screen and (max-width: 767px) {
-  header {
-    height: 4em;
-  }
+header a {
+    display: flex;
+    align-items: center;
+    border-bottom: none;
 }
 
-header > a {
-  display: flex;
-  align-items: center;
-  border-bottom: none;
+header .left,
+header .left a,
+header .right {
+    display: inline-flex;
+    align-items: center;
+    width: 50%;
+}
+
+header .right {
+    display: inline-flex;
+    justify-content: right;
+    text-align: right;
+    opacity: .8;
 }
 
 header .icon {
-  background: url(/img/icon.png);
-  background-size: 40px;
-  width: 40px;
-  height: 40px;
-  display: inline-flex;
-  margin-right: 1em;
+    background-size: 40px !important;
+    width: 40px;
+    height: 40px;
+    display: inline-flex;
+}
+
+header .left .icon {
+    background: url(/img/icon.png);
+    margin-right: 1em;
+}
+
+header .right .icon {
+    background: url(/img/rss.png);
 }
 
 header .title {
-  display: inline-flex;
+    display: inline-flex;
 }
 
 main {
-  height: calc(100% - 3em);
-  overflow: auto;
-  display: flex;
-  flex-direction: column;
-  align-items: center;
-  font-family: Avenir, Palatino, Georgia, Verdana, Helvetica, Arial, sans-serif;
-  padding: 0 2em;
+    height: 92%;
+    overflow: auto;
+    display: flex;
+    flex-direction: column;
+    align-items: center;
+    padding: 0 2em;
 }
 
 h1 {
-  font-size: 2em;
-  line-height: 1.2em;
+    font-size: 2em;
+    line-height: 1.2em;
 }
 
 h2 {
-  font-size: 1.5em;
-  line-height: 1.1em;
+    font-size: 1.5em;
+    line-height: 1.1em;
+}
+
+@media screen and (min-width: 768px) {
+    header {
+        height: 6%;
+    }
+
+    main {
+        height: 94%;
+    }
 }
diff --git a/static/css/home.css b/static/css/home.css
index ae10312..859dff9 100644
--- a/static/css/home.css
+++ b/static/css/home.css
@@ -4,11 +4,16 @@ main {
 
 .articles {
     width: 100%;
+    height: 100%;
     display: flex;
+    flex-wrap: wrap;
+    overflow: auto;
 }
 
 .article {
     display: block;
+    width: 100%;
+    max-height: 80%;
     box-shadow: 0 1px 3px 1px #ddd;
     overflow: hidden;
     text-overflow: ellipsis;
@@ -17,33 +22,39 @@
 
 .article:hover {
     box-shadow: 0 1px 4px 2px #bcbcbc;
-    }
+}
 
-@media screen and (max-width: 767px) {
+@media screen and (min-width: 767px) {
     .article {
-        width: 100%;
+        max-height: 65%;
     }
 }
 
-@media screen and (min-width: 768px) and (max-width: 990px) {
+@media screen and (min-width: 640px) and (max-width: 767px) {
+    .article {
+        padding: 0 calc(1em + 7vw);
+    }
+}
+
+@media screen and (min-width: 768px) and (max-width: 979px) {
     .article {
         width: 50%;
     }
 }
 
-@media screen and (min-width: 990px) and (max-width: 1023px) {
+@media screen and (min-width: 980px) and (max-width: 1279px) {
     .article {
         width: 33%;
     }
 }
 
-@media screen and (min-width: 1024px) and (max-width: 1279px) {
+@media screen and (min-width: 1280px) and (max-width: 1599px) {
     .article {
         width: 25%;
     }
 }
 
-@media screen and (min-width: 1280px) {
+@media screen and (min-width: 1600px) {
     .article {
         width: 20%;
     }
 }
diff --git a/static/img/people-detect-1.png b/static/img/people-detect-1.png
new file mode 100644
index 0000000..d3b83fa
Binary files /dev/null and b/static/img/people-detect-1.png differ
diff --git a/static/img/rss.png b/static/img/rss.png
new file mode 100644
index 0000000..2c354d5
Binary files /dev/null and b/static/img/rss.png differ
diff --git a/static/pages/Detect-people-with-a-RaspberryPi-a-thermal-camera-Platypush-and-a-pinch-of-machine-learning.md b/static/pages/Detect-people-with-a-RaspberryPi-a-thermal-camera-Platypush-and-a-pinch-of-machine-learning.md
new file mode 100644
index 0000000..edd4ebe
--- /dev/null
+++ b/static/pages/Detect-people-with-a-RaspberryPi-a-thermal-camera-Platypush-and-a-pinch-of-machine-learning.md
@@ -0,0 +1,168 @@
+[//]: # (title: Detect people with a RaspberryPi, a thermal camera, Platypush and Tensorflow)
+[//]: # (description: Use cheap components and open-source software to build a robust presence detector.)
+[//]: # (image: /img/people-detect-1.png)
+[//]: # (published: 2019-09-27)
+
+Triggering events based on the presence of people has been the dream of many geeks and DIY automation junkies for a
+while. Having your house turn the lights on or off when you enter or exit your living room is an interesting
+application, for instance.
+
+Most of the solutions out there to solve these kinds of problems, even the more high-end ones like the
+[Philips Hue sensors](https://www2.meethue.com/en-us/p/hue-motion-sensor/046677473389), detect motion, not the actual
+presence of people — which means that the lights will switch off once you lie on your couch like a sloth. The ability
+to turn off the music and/or the TV when you exit the room and head to your bedroom, without the hassle of switching
+all the buttons off, is also an interesting corollary. Detecting the presence of people in your room while you're not
+at home is another interesting application.
+
+Thermal cameras coupled with deep neural networks are a much more robust strategy to actually detect the presence of
+people. Unlike motion sensors, they will detect the presence of people even when they aren't moving. And unlike
+optical cameras, they detect bodies by measuring the heat that they emit in the form of infrared radiation, and are
+therefore much more robust — their sensitivity doesn't depend on lighting conditions, on the position of the target,
+or on colour.
+
+Before exploring the thermal camera solution, I tried for a while to build a model that instead relied on optical
+images from a traditional webcam. The differences are staggering: I trained the optical model on more than ten
+thousand 640x480 images taken over the course of a week in different lighting conditions, while I trained the thermal
+camera model on a dataset of 900 24x32 images taken during a single day. Even with more complex network architectures,
+the optical model wouldn't score above 91% accuracy in detecting the presence of people, while the thermal model
+achieved around 99% accuracy within a single training phase of a simpler neural network.
+
+Despite the high potential, there's not much out there on the market — there's been some research work on the topic
+(if you google "people detection thermal camera" you'll mostly find research papers) and a few high-end, expensive
+products for professional surveillance. Lacking a ready-to-go solution for my house, I decided to take on the task
+myself and build my own solution — making sure that it can easily be replicated by anyone.
+
+## Prepare the hardware
+
+For this example we'll use the following hardware:
+
+- A RaspberryPi (cost: around $35). In theory any model should work, but it's probably not a good idea to use a
+  single-core RaspberryPi Zero for machine learning tasks — the task itself is not very expensive (we'll only use the
+  Raspberry for doing predictions on a trained model, not to train the model), but it may still suffer some latency on
+  a Zero. Plus, it may be really painful to install some of the required libraries (like Tensorflow or OpenCV) on the
+  `arm6` architecture used by the RaspberryPi Zero. Any better-performing model (from the RPi3 onwards) should
+  definitely do the job.
+
+- A thermal camera. For this project, I've used the
+  [MLX90640](https://shop.pimoroni.com/products/mlx90640-thermal-camera-breakout) Pimoroni breakout camera (cost: $55),
+  as it's relatively cheap, easy to install, and it provides good results. This camera comes in standard (55°) and
+  wide-angle (110°) versions. I've used the wide-angle model as the camera monitors a large living room, but take into
+  account that both have the same resolution (32x24 pixels), so the wider angle comes at the cost of a lower spatial
+  resolution. If you want to use a different thermal camera, there's not much you'll need to change, as long as it
+  comes with a software interface for the RaspberryPi and
+  it's [compatible with Platypush](https://platypush.readthedocs.io/en/latest/platypush/plugins/camera.ir.mlx90640.html).
+
+Setting up the MLX90640 on your RaspberryPi is as easy as pie if you have a Breakout Garden: fit the Breakout Garden
+on top of your RaspberryPi, fit the camera breakout into an I2C slot, boot the RaspberryPi, done. Otherwise, you can
+also connect the device directly to the [RaspberryPi I2C interface](https://radiostud.io/howto-i2c-communication-rpi/),
+either using the right hardware PINs or the software emulation layer.
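+
+Either way, it's a good idea to verify that the camera is actually visible on the I2C bus before moving on. Here is a
+minimal Python sketch for such a check. It assumes that the camera is connected to bus 1 (the default on most
+RaspberryPi models), that it answers on the MLX90640's default address `0x33`, and that the `smbus2` package is
+installed:
+
+```python
+from smbus2 import SMBus
+
+MLX90640_ADDR = 0x33  # Default I2C address of the MLX90640
+
+with SMBus(1) as bus:  # Bus 1 maps to /dev/i2c-1
+    try:
+        # A successful read means that something is answering on 0x33
+        bus.read_byte(MLX90640_ADDR)
+        print('MLX90640 detected on the I2C bus')
+    except OSError:
+        print('No device found on 0x33 - check your wiring')
+```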
+
+## Prepare the software
+
+I tested my code on Raspbian, but with a few minor modifications it should be easily adaptable to any distribution
+installed on the RaspberryPi.
+
+The software support for the thermal camera requires a bit of work. The MLX90640 doesn't come (yet) with a
+ready-to-use Python interface, but an [open-source C++ driver is provided](https://github.com/pimoroni/mlx90640-library) -
+and that's the driver that is wrapped by the Platypush integration. Instructions to install it:
+
+```shell
+# Install the dependencies
+[sudo] apt-get install libi2c-dev
+
+# Enable the I2C interface
+echo dtparam=i2c_arm=on | sudo tee -a /boot/config.txt
+
+# It's advised to configure the I2C bus baud rate to
+# 400kHz to support the higher throughput of the sensor
+echo dtparam=i2c1_baudrate=400000 | sudo tee -a /boot/config.txt
+
+# A reboot is required here if you didn't have the
+# options above enabled in your /boot/config.txt
+[sudo] reboot
+
+# Clone the driver's codebase
+git clone https://github.com/pimoroni/mlx90640-library
+cd mlx90640-library
+
+# Compile the rawrgb example
+make clean
+make bcm2835
+make I2C_MODE=LINUX examples/rawrgb
+```
+
+If it all went well you should see an executable named `rawrgb` under the `examples` directory. If you run it you
+should see a bunch of binary data — that's the raw binary representation of the frames captured by the camera.
+Remember where it is located, or move it to a custom bin folder, as it's the executable that Platypush will use to
+interact with the camera module.
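+
+Just to get a feel for what the driver returns, you can read a frame directly from Python. The following is a minimal
+sketch, not how Platypush itself consumes the stream: it assumes that `rawrgb` writes raw 32x24 RGB frames (3 bytes
+per pixel) to its standard output, and that the executable sits at the relative path from the build above:
+
+```python
+import subprocess
+
+# One frame = 32x24 pixels, 3 bytes (RGB) per pixel
+FRAME_SIZE = 32 * 24 * 3
+
+with subprocess.Popen(['./examples/rawrgb'], stdout=subprocess.PIPE) as rawrgb:
+    frame = rawrgb.stdout.read(FRAME_SIZE)
+    print(f'Read a {len(frame)}-byte frame from the camera')
+    rawrgb.terminate()
+```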
+
+This post assumes that you have already installed and configured Platypush on your system. If not, head to my post on
+[getting started with Platypush](https://blog.platypush.tech/article/Ultimate-self-hosted-automation-with-Platypush),
+the [readthedocs page](https://platypush.readthedocs.io/en/latest/), the
+[Gitlab page](https://git.platypush.tech/platypush/platypush) or
+[the wiki](https://git.platypush.tech/platypush/platypush/-/wikis/home).
+
+Also install the Python dependencies for the HTTP server, the MLX90640 plugin and Tensorflow:
+
+```shell
+[sudo] pip install 'platypush[http,tensorflow,mlx90640]'
+```
+
+Now head to your computer (we'll be using it to build the model that will then run on the RaspberryPi), and install
+OpenCV, Tensorflow, Jupyter and my utilities for handling images:
+
+```shell
+# For image manipulation
+[sudo] pip install opencv-python
+
+# Install Jupyter notebook to run the training code
+[sudo] pip install jupyterlab
+# Then follow the instructions at https://jupyter.org/install
+
+# Tensorflow framework for machine learning and utilities
+[sudo] pip install tensorflow numpy matplotlib
+
+# Clone my repository with the image and training utilities
+# and the Jupyter notebooks that we'll use for training
+git clone https://github.com/BlackLight/imgdetect-utils
+```
+
+## Capturing phase
+
+Now that you've got all the hardware and software in place, it's time to start capturing frames with your camera and
+using them to train your model. First, configure
+the [MLX90640 plugin](https://platypush.readthedocs.io/en/latest/platypush/plugins/camera.ir.mlx90640.html) in your
+Platypush configuration file (by default, `~/.config/platypush/config.yaml`):
+
+```yaml
+# Enable the webserver
+backend.http:
+    enabled: True
+
+camera.ir.mlx90640:
+    fps: 16        # Frames per second
+    rotate: 270    # Can be 0, 90, 180, 270
+    rawrgb_path: /path/to/your/rawrgb
+```
+
+Restart the service and, if you haven't already, create a user from the web interface at `http://your-rpi:8008`. You
+should now be able to take pictures through the API:
+
+```shell
+curl -XPOST -H 'Content-Type: application/json' -d '
+{
+  "type":"request",
+  "action":"camera.ir.mlx90640.capture",
+  "args": {
+    "output_file":"~/snap.png",
+    "scale_factor":20
+  }
+}' -u 'username:password' http://localhost:8008/execute
+```
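+
+If you prefer Python over raw curl, the same call can be scripted through the `requests` library (assuming it's
+installed: `pip install requests`):
+
+```python
+import requests
+
+# Same request as the curl command above, with HTTP basic auth
+response = requests.post(
+    'http://localhost:8008/execute',
+    auth=('username', 'password'),
+    json={
+        'type': 'request',
+        'action': 'camera.ir.mlx90640.capture',
+        'args': {
+            'output_file': '~/snap.png',
+            'scale_factor': 20,
+        },
+    },
+)
+
+print(response.json())
+```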
+
+If everything went well, the thermal picture should be stored under `~/snap.png`. In my case it looks like this while
+I'm standing in front of the sensor:
+
+![Thermal camera snapshot](../img/people-detect-1.png)
+
+Notice the glow at the bottom-right corner — that's actually the heat from my RaspberryPi 4 CPU. It's there in all the
+images I take, and you'll probably see similar results if you mounted your camera on top of the Raspberry itself, but
+it shouldn't be an issue for your model training purposes.
+
+If you open the web panel (`http://your-host:8008`) you'll also notice a new tab, represented by the sun icon, that
+you can use to monitor your camera from a web interface.
diff --git a/templates/article.html b/templates/article.html
index cf3f52a..7d7705e 100644
--- a/templates/article.html
+++ b/templates/article.html
@@ -1,41 +1,29 @@
-<!doctype html>
-<html>
-<head>
-    <meta charset="utf-8">
-    <meta name="viewport" content="width=device-width, initial-scale=1">
-    <title>{{ title }}</title>
-</head>
+{% with title=title or 'Platypush blog', styles=['/css/blog.css', '/css/code.css'] %}
+    {% include 'common-head.html' %}
+{% endwith %}
-
-<body>
-    <header>
-        <a href="/">
-            <div class="icon"></div>
-            <div class="title">PlatyBlog</div>
-        </a>
-    </header>
-
-    <main>
-        <div class="content">
-            <div class="title">
-                <h1>{{ title }}</h1>
-            </div>
-
-            {% if description %}
-            <div class="description">
-                <h2>{{ description }}</h2>
-            </div>
-            {% endif %}
-
-            {% if published %}
-            <div class="published">
-                Published on {{ published }}
-            </div>
-            {% endif %}
-
-            <div class="article">
-                {{ content | safe }}
-            </div>
-        </div>
-    </main>
-</body>
-</html>
+
+<main>
+    <div class="content">
+        <div class="title">
+            <h1>{{ title }}</h1>
+        </div>
+
+        {% if description %}
+        <div class="description">
+            <h2>{{ description }}</h2>
+        </div>
+        {% endif %}
+
+        {% if published %}
+        <div class="published">
+            Published on {{ published }}
+        </div>
+        {% endif %}
+
+        <div class="article">
+            {{ content | safe }}
+        </div>
+    </div>
+</main>
+
+{% include 'common-tail.html' %}
diff --git a/templates/common-head.html b/templates/common-head.html
new file mode 100644
index 0000000..be4d45c
--- /dev/null
+++ b/templates/common-head.html
@@ -0,0 +1,30 @@
+<!doctype html>
+<html lang="en">
+
+<head>
+    <meta charset="utf-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+    <link rel="stylesheet" href="/css/common.css">
+    <link rel="alternate" type="application/rss+xml" title="Platypush blog feeds" href="/rss">
+
+    {% if styles %}
+    {% for style in styles %}
+    <link rel="stylesheet" href="{{ style }}">
+    {% endfor %}
+    {% endif %}
+
+    <title>{{ title }}</title>
+</head>
+
+<body>
+    <header>
+        <div class="left">
+            <a href="/">
+                <div class="icon"></div>
+            </a>
+        </div>
+
+        <div class="right">
+            <a href="/rss">
+                <div class="icon"></div>
+            </a>
+        </div>
+    </header>
diff --git a/templates/common-tail.html b/templates/common-tail.html
new file mode 100644
index 0000000..308b1d0
--- /dev/null
+++ b/templates/common-tail.html
@@ -0,0 +1,2 @@
+</body>
+</html>
diff --git a/templates/index.html b/templates/index.html
index 5effc02..3091057 100644
--- a/templates/index.html
+++ b/templates/index.html
@@ -1,48 +1,37 @@
-<!doctype html>
-<html>
-<head>
-    <link rel="stylesheet" href="/css/common.css">
-    <link rel="stylesheet" href="/css/home.css">
-    <title>Platypush blog</title>
-</head>
+{% with title=title or 'Platypush blog', styles=['/css/home.css'] %}
+    {% include 'common-head.html' %}
+{% endwith %}
-
-<body>
-    <header>
-        <a href="/">
-            <div class="icon"></div>
-            <div class="title">PlatyBlog</div>
-        </a>
-    </header>
-
-    <main>
-        <div class="articles">
-            {% for page in pages %}
-            <a class="article" href="{{ page['uri'] }}">
-                {% if page['image'] %}
-                <div class="image">
-                    <img src="{{ page['image'] }}">
-                </div>
-                {% endif %}
-
-                <div class="title">
-                    {{ page['title'] }}
-                </div>
-
-                {% if page['published'] %}
-                <div class="published">
-                    {{ page['published'].strftime('%b %d, %Y') }}
-                </div>
-                {% endif %}
-
-                {% if page['description'] %}
-                <div class="description">
-                    {{ page['description'] }}
-                </div>
-                {% endif %}
-            </a>
-            {% endfor %}
-        </div>
-    </main>
-</body>
-</html>
+
+<main>
+    <div class="articles">
+        {% for page in pages %}
+        <a class="article" href="{{ page['uri'] }}">
+            {% if page['image'] %}
+            <div class="image">
+                <img src="{{ page['image'] }}">
+            </div>
+            {% endif %}
+
+            <div class="title">
+                {{ page['title'] }}
+            </div>
+
+            {% if page['published'] %}
+            <div class="published">
+                {{ page['published'].strftime('%b %d, %Y') }}
+            </div>
+            {% endif %}
+
+            {% if page['description'] %}
+            <div class="description">
+                {{ page['description'] }}
+            </div>
+            {% endif %}
+        </a>
+        {% endfor %}
+    </div>
+</main>
+
+{% include 'common-tail.html' %}