From c79cc8345e1c8ac0629c41a5cb0ec1d63d8c45e7 Mon Sep 17 00:00:00 2001
From: Mike Tyka
Date: Wed, 1 May 2019 09:42:12 -0700
Subject: [PATCH] Initial commit of camera examples.

---
 CONTRIBUTING.md                   |  28 +++
 LICENSE                           | 202 ++++++++++++++++++++++++++++++
 README.md                         |  59 +++++++++
 download_models.sh                |  20 +++
 gstreamer/README.md               |  34 +++++
 gstreamer/classify.py             |  73 +++++++++++
 gstreamer/detect.py               |  98 +++++++++++++++
 gstreamer/gstreamer.py            | 118 +++++++++++++++++
 gstreamer/install_requirements.sh |  16 +++
 raspicam/README.md                |  27 ++++
 raspicam/classify_capture.py      |  76 +++++++++++
 11 files changed, 751 insertions(+)
 create mode 100644 CONTRIBUTING.md
 create mode 100644 LICENSE
 create mode 100644 README.md
 create mode 100644 download_models.sh
 create mode 100644 gstreamer/README.md
 create mode 100644 gstreamer/classify.py
 create mode 100644 gstreamer/detect.py
 create mode 100644 gstreamer/gstreamer.py
 create mode 100644 gstreamer/install_requirements.sh
 create mode 100644 raspicam/README.md
 create mode 100644 raspicam/classify_capture.py

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..939e534
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,28 @@
+# How to Contribute
+
+We'd love to accept your patches and contributions to this project. There are
+just a few small guidelines you need to follow.
+
+## Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution;
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
+## Code reviews
+
+All submissions, including submissions by project members, require review. We
+use GitHub pull requests for this purpose. Consult
+[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
+information on using pull requests.
+
+## Community Guidelines
+
+This project follows [Google's Open Source Community
+Guidelines](https://opensource.google.com/conduct/).
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md new file mode 100644 index 0000000..ade4bfe --- /dev/null +++ b/README.md @@ -0,0 +1,59 @@ +# EdgeTPU simple camera examples + +This repo contains a collection of examples that use camera streams +together with the EdgeTPU Python API. 
+
+## Installation
+
+Before you start using the examples, run
+the ```download_models.sh``` script to download a selection of models.
+The canned models will be downloaded and extracted into a new folder,
+```canned_models```.
+
+
+Each camera library may have further requirements; check the
+README file in the respective subfolder.
+
+## Contents
+
+  * __Gstreamer__ Python examples using gstreamer to obtain camera images. These
+    examples work on Linux using a webcam, on a Raspberry Pi with
+    the Raspicam, and on the Coral DevBoard using the Coral camera. For the
+    former two you will also need a Coral USB Accelerator to run the models.
+  * __Raspicam__ Python example using picamera. This is only intended for the
+    Raspberry Pi and will require a Coral USB Accelerator.
+
+## Canned models
+
+For all the demos in this repository you can change the model and the labels
+file by using the flags ```--model``` and
+```--labels```. Be sure to use models whose file names end in ```_edgetpu```, as
+those are compiled for the accelerator; otherwise the model will run on the
+CPU and be much slower.
+
+For classification you need to select one of the classification models
+and its corresponding labels file:
+
+```
+inception_v1_224_quant_edgetpu.tflite, imagenet_labels.txt
+inception_v2_224_quant_edgetpu.tflite, imagenet_labels.txt
+inception_v3_299_quant_edgetpu.tflite, imagenet_labels.txt
+inception_v4_299_quant_edgetpu.tflite, imagenet_labels.txt
+mobilenet_v1_1.0_224_quant_edgetpu.tflite, imagenet_labels.txt
+mobilenet_v2_1.0_224_quant_edgetpu.tflite, imagenet_labels.txt
+
+mobilenet_v2_1.0_224_inat_bird_quant_edgetpu.tflite, inat_bird_labels.txt
+mobilenet_v2_1.0_224_inat_insect_quant_edgetpu.tflite, inat_insect_labels.txt
+mobilenet_v2_1.0_224_inat_plant_quant_edgetpu.tflite, inat_plant_labels.txt
+```
+
+For detection you need to select one of the SSD detection models
+and its corresponding labels file:
+
+```
+mobilenet_ssd_v1_coco_quant_postprocess_edgetpu.tflite, coco_labels.txt
+mobilenet_ssd_v2_coco_quant_postprocess_edgetpu.tflite, coco_labels.txt
+mobilenet_ssd_v2_face_quant_postprocess_edgetpu.tflite, coco_labels.txt
+```
+
+
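For orientation, here is a minimal sketch of how a model and labels pair from the lists above is consumed by the EdgeTPU Python API that all of these demos build on. Paths assume you ran ```download_models.sh``` from the repo root, and ```test.jpg``` is a placeholder image:

```python
from PIL import Image
from edgetpu.classification.engine import ClassificationEngine

# Load a canned model and the matching labels file ("<id> <name>" per line).
engine = ClassificationEngine('canned_models/mobilenet_v2_1.0_224_quant_edgetpu.tflite')
labels = {}
with open('canned_models/imagenet_labels.txt', encoding='utf-8') as f:
    for line in f:
        num, text = line.strip().split(maxsplit=1)
        labels[int(num)] = text

# Each result is a (label_id, score) pair.
for label_id, score in engine.ClassifyWithImage(Image.open('test.jpg'), top_k=3):
    print('%s: %.2f' % (labels[label_id], score))
```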
diff --git a/download_models.sh b/download_models.sh
new file mode 100644
index 0000000..471b3e6
--- /dev/null
+++ b/download_models.sh
@@ -0,0 +1,20 @@
+#!/bin/sh
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+wget http://storage.googleapis.com/cloud-iot-edge-pretrained-models/canned_models.tar.gz
+# Extract the models into canned_models/, as described in the README.
+mkdir -p canned_models
+tar -C canned_models -xzf canned_models.tar.gz
+rm canned_models.tar.gz
diff --git a/gstreamer/README.md b/gstreamer/README.md
new file mode 100644
index 0000000..4e128ea
--- /dev/null
+++ b/gstreamer/README.md
@@ -0,0 +1,34 @@
+This folder contains two examples that use gstreamer to obtain camera images. These
+examples work on Linux using a webcam, on a Raspberry Pi with
+the Raspicam, and on the Coral DevBoard using the Coral camera. For the
+former two you will also need a Coral USB Accelerator to run the models.
+
+## Installation
+
+Make sure the gstreamer libraries are installed. On the Coral DevBoard this
+isn't necessary, but on a Raspberry Pi or a generic Linux system it will be:
+
+```
+sh install_requirements.sh
+```
+
+
+## Classification Demo
+
+```
+python3 classify.py
+```
+
+You can change the model and the labels file using the flags ```--model``` and
+```--labels```.
+
+## Detection Demo (SSD models)
+
+```
+python3 detect.py
+```
+
+As before, you can change the model and the labels file using the flags ```--model```
+and ```--labels```.
+
+
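Both demos in this folder share one pattern: they pass a callback to ```gstreamer.run_pipeline```, which calls it once per frame with a PIL image (at the inference size) and an svgwrite canvas that is rendered over the video. A minimal sketch of a custom client, assuming it sits next to ```gstreamer.py```:

```python
import gstreamer

def user_callback(image, svg_canvas):
    # 'image' is a PIL.Image at the appsink size; 'svg_canvas' is an
    # svgwrite.Drawing composited over the video by rsvgoverlay.
    svg_canvas.add(svg_canvas.text('%d x %d' % image.size, insert=(10, 20),
                                   fill='white', font_size='20'))

gstreamer.run_pipeline(user_callback)
```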
diff --git a/gstreamer/classify.py b/gstreamer/classify.py
new file mode 100644
index 0000000..f2d905f
--- /dev/null
+++ b/gstreamer/classify.py
@@ -0,0 +1,73 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A demo which runs object classification on camera frames."""
+import argparse
+import time
+import re
+import svgwrite
+import os
+from edgetpu.classification.engine import ClassificationEngine
+import gstreamer
+
+def load_labels(path):
+    p = re.compile(r'\s*(\d+)(.+)')
+    with open(path, 'r', encoding='utf-8') as f:
+        lines = (p.match(line).groups() for line in f.readlines())
+        return {int(num): text.strip() for num, text in lines}
+
+def generate_svg(dwg, text_lines):
+    for y, line in enumerate(text_lines):
+        dwg.add(dwg.text(line, insert=(11, y*20+1), fill='black', font_size='20'))
+        dwg.add(dwg.text(line, insert=(10, y*20), fill='white', font_size='20'))
+
+def main():
+    default_model_dir = '../canned_models'
+    default_model = 'mobilenet_v2_1.0_224_quant_edgetpu.tflite'
+    default_labels = 'imagenet_labels.txt'
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--model', help='.tflite model path',
+                        default=os.path.join(default_model_dir, default_model))
+    parser.add_argument('--labels', help='label file path',
+                        default=os.path.join(default_model_dir, default_labels))
+    parser.add_argument('--top_k', type=int, default=3,
+                        help='number of classes with highest score to display')
+    parser.add_argument('--threshold', type=float, default=0.1,
+                        help='class score threshold')
+    args = parser.parse_args()
+
+    print('Loading %s with %s labels.' % (args.model, args.labels))
+    engine = ClassificationEngine(args.model)
+    labels = load_labels(args.labels)
+
+    last_time = time.monotonic()
+    def user_callback(image, svg_canvas):
+        nonlocal last_time
+        start_time = time.monotonic()
+        results = engine.ClassifyWithImage(image, threshold=args.threshold, top_k=args.top_k)
+        end_time = time.monotonic()
+        text_lines = [
+            'Inference: %.2f ms' % ((end_time - start_time) * 1000),
+            'FPS: %.2f fps' % (1.0 / (end_time - last_time)),
+        ]
+        for index, score in results:
+            text_lines.append('score=%.2f: %s' % (score, labels[index]))
+        print(' '.join(text_lines))
+        last_time = end_time
+        generate_svg(svg_canvas, text_lines)
+
+    gstreamer.run_pipeline(user_callback)
+
+if __name__ == '__main__':
+    main()
diff --git a/gstreamer/detect.py b/gstreamer/detect.py
new file mode 100644
index 0000000..54dcb4d
--- /dev/null
+++ b/gstreamer/detect.py
@@ -0,0 +1,98 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A demo which runs object detection on camera frames.
+
+export TEST_DATA=/usr/lib/python3/dist-packages/edgetpu/test_data
+
+Run face detection model:
+python3 detect.py \
+  --model ${TEST_DATA}/mobilenet_ssd_v2_face_quant_postprocess_edgetpu.tflite
+
+Run coco model:
+python3 detect.py \
+  --model ${TEST_DATA}/mobilenet_ssd_v2_coco_quant_postprocess_edgetpu.tflite \
+  --labels ${TEST_DATA}/coco_labels.txt
+"""
+import argparse
+import time
+import re
+import svgwrite
+import os
+from edgetpu.detection.engine import DetectionEngine
+import gstreamer
+
+def load_labels(path):
+    p = re.compile(r'\s*(\d+)(.+)')
+    with open(path, 'r', encoding='utf-8') as f:
+        lines = (p.match(line).groups() for line in f.readlines())
+        return {int(num): text.strip() for num, text in lines}
+
+def shadow_text(dwg, x, y, text, font_size=20):
+    dwg.add(dwg.text(text, insert=(x+1, y+1), fill='black', font_size=font_size))
+    dwg.add(dwg.text(text, insert=(x, y), fill='white', font_size=font_size))
+
+def generate_svg(dwg, objs, labels, text_lines):
+    width, height = dwg.attribs['width'], dwg.attribs['height']
+    for y, line in enumerate(text_lines):
+        shadow_text(dwg, 10, y*20, line)
+    for obj in objs:
+        x0, y0, x1, y1 = obj.bounding_box.flatten().tolist()
+        x, y, w, h = x0, y0, x1 - x0, y1 - y0
+        x, y, w, h = int(x * width), int(y * height), int(w * width), int(h * height)
+        percent = int(100 * obj.score)
+        label = '%d%% %s' % (percent, labels[obj.label_id])
+        shadow_text(dwg, x, y - 5, label)
+        dwg.add(dwg.rect(insert=(x,y), size=(w, h),
+                         fill='red', fill_opacity=0.3, stroke='white'))
+
+def main():
+    default_model_dir = '../canned_models'
+    default_model = 'mobilenet_ssd_v2_coco_quant_postprocess_edgetpu.tflite'
+    default_labels = 'coco_labels.txt'
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--model', help='.tflite model path',
+                        default=os.path.join(default_model_dir, default_model))
+    parser.add_argument('--labels', help='label file path',
+                        default=os.path.join(default_model_dir, default_labels))
+    parser.add_argument('--top_k', type=int, default=3,
+                        help='number of classes with highest score to display')
+    parser.add_argument('--threshold', type=float, default=0.1,
+                        help='class score threshold')
+    args = parser.parse_args()
+
+    print('Loading %s with %s labels.' % (args.model, args.labels))
+    engine = DetectionEngine(args.model)
+    labels = load_labels(args.labels)
+
+    last_time = time.monotonic()
+    def user_callback(image, svg_canvas):
+        nonlocal last_time
+        start_time = time.monotonic()
+        objs = engine.DetectWithImage(image, threshold=args.threshold,
+                                      keep_aspect_ratio=True, relative_coord=True,
+                                      top_k=args.top_k)
+        end_time = time.monotonic()
+        text_lines = [
+            'Inference: %.2f ms' % ((end_time - start_time) * 1000),
+            'FPS: %.2f fps' % (1.0 / (end_time - last_time)),
+        ]
+        print(' '.join(text_lines))
+        last_time = end_time
+        generate_svg(svg_canvas, objs, labels, text_lines)
+
+    gstreamer.run_pipeline(user_callback)
+
+if __name__ == '__main__':
+    main()
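The objects returned by ```DetectWithImage``` expose ```label_id```, ```score```, and a relative ```bounding_box```, as used in ```generate_svg``` above. An illustrative helper, hypothetical rather than part of the demo, for keeping a single class:

```python
def filter_objects(objs, labels, wanted='person'):
    # Keep only detections of one class, e.g. to count people with the COCO model.
    return [obj for obj in objs if labels.get(obj.label_id) == wanted]
```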
diff --git a/gstreamer/gstreamer.py b/gstreamer/gstreamer.py
new file mode 100644
index 0000000..6b125a8
--- /dev/null
+++ b/gstreamer/gstreamer.py
@@ -0,0 +1,118 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+from functools import partial
+import svgwrite
+
+import gi
+gi.require_version('Gst', '1.0')
+gi.require_version('GstBase', '1.0')
+from gi.repository import GLib, GObject, Gst, GstBase
+from PIL import Image
+
+GObject.threads_init()
+Gst.init(None)
+
+def on_bus_message(bus, message, loop):
+    t = message.type
+    if t == Gst.MessageType.EOS:
+        loop.quit()
+    elif t == Gst.MessageType.WARNING:
+        err, debug = message.parse_warning()
+        sys.stderr.write('Warning: %s: %s\n' % (err, debug))
+    elif t == Gst.MessageType.ERROR:
+        err, debug = message.parse_error()
+        sys.stderr.write('Error: %s: %s\n' % (err, debug))
+        loop.quit()
+    return True
+
+def on_new_sample(sink, overlay, screen_size, appsink_size, user_function):
+    sample = sink.emit('pull-sample')
+    buf = sample.get_buffer()
+    result, mapinfo = buf.map(Gst.MapFlags.READ)
+    if result:
+        img = Image.frombytes('RGB', (appsink_size[0], appsink_size[1]), mapinfo.data, 'raw')
+        svg_canvas = svgwrite.Drawing('', size=(screen_size[0], screen_size[1]))
+        user_function(img, svg_canvas)
+        overlay.set_property('data', svg_canvas.tostring())
+    buf.unmap(mapinfo)
+    return Gst.FlowReturn.OK
+
+def detectCoralDevBoard():
+    try:
+        if 'MX8MQ' in open('/sys/firmware/devicetree/base/model').read():
+            print('Detected EdgeTPU dev board.')
+            return True
+    except OSError: pass
+    return False
+
+def run_pipeline(user_function,
+                 src_size=(640,480),
+                 appsink_size=(320, 180)):
+    PIPELINE = 'v4l2src device=/dev/video0 ! {src_caps} ! {leaky_q} ! tee name=t'
+    if detectCoralDevBoard():
+        SRC_CAPS = 'video/x-raw,format=YUY2,width={width},height={height},framerate=30/1'
+        PIPELINE += """
+            t. ! {leaky_q} ! glupload ! glfilterbin filter=glcolorscale
+               ! {dl_caps} ! videoconvert ! {sink_caps} ! {sink_element}
+            t. ! {leaky_q} ! glupload ! glfilterbin filter=glcolorscale
+               ! rsvgoverlay name=overlay ! waylandsink
+            """
+    else:
+        SRC_CAPS = 'video/x-raw,width={width},height={height},framerate=30/1'
+        PIPELINE += """
+            t. ! {leaky_q} ! videoconvert ! videoscale ! {sink_caps} ! {sink_element}
+            t. ! {leaky_q} ! videoconvert
+               ! rsvgoverlay name=overlay ! videoconvert ! ximagesink
+            """
+
+    SINK_ELEMENT = 'appsink name=appsink sync=false emit-signals=true max-buffers=1 drop=true'
+    DL_CAPS = 'video/x-raw,format=RGBA,width={width},height={height}'
+    SINK_CAPS = 'video/x-raw,format=RGB,width={width},height={height}'
+    LEAKY_Q = 'queue max-size-buffers=1 leaky=downstream'
+
+    src_caps = SRC_CAPS.format(width=src_size[0], height=src_size[1])
+    dl_caps = DL_CAPS.format(width=appsink_size[0], height=appsink_size[1])
+    sink_caps = SINK_CAPS.format(width=appsink_size[0], height=appsink_size[1])
+    pipeline = PIPELINE.format(leaky_q=LEAKY_Q,
+        src_caps=src_caps, dl_caps=dl_caps, sink_caps=sink_caps,
+        sink_element=SINK_ELEMENT)
+
+    print('Gstreamer pipeline: ', pipeline)
+    pipeline = Gst.parse_launch(pipeline)
+
+    overlay = pipeline.get_by_name('overlay')
+    appsink = pipeline.get_by_name('appsink')
+    appsink.connect('new-sample', partial(on_new_sample,
+        overlay=overlay, screen_size=src_size,
+        appsink_size=appsink_size, user_function=user_function))
+    loop = GObject.MainLoop()
+
+    # Set up a pipeline bus watch to catch errors.
+    bus = pipeline.get_bus()
+    bus.add_signal_watch()
+    bus.connect('message', on_bus_message, loop)
+
+    # Run pipeline.
+    pipeline.set_state(Gst.State.PLAYING)
+    try:
+        loop.run()
+    except KeyboardInterrupt:
+        pass
+
+    # Clean up.
+    pipeline.set_state(Gst.State.NULL)
+    while GLib.MainContext.default().iteration(False):
+        pass
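```run_pipeline``` defaults to a 640x480 source and a 320x180 inference size, which is what both demos rely on, but the sizes are parameters. A hypothetical invocation that captures at 720p and hands the callback 300x300 frames (e.g. to match an SSD model's input), assuming the camera supports 1280x720 at 30 fps:

```python
import gstreamer

def callback(image, svg_canvas):
    print('got frame:', image.size)  # prints (300, 300)

gstreamer.run_pipeline(callback, src_size=(1280, 720), appsink_size=(300, 300))
```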
diff --git a/gstreamer/install_requirements.sh b/gstreamer/install_requirements.sh
new file mode 100644
index 0000000..9b0409d
--- /dev/null
+++ b/gstreamer/install_requirements.sh
@@ -0,0 +1,16 @@
+#!/bin/sh
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+sudo apt-get install -y gstreamer1.0-plugins-bad gstreamer1.0-plugins-good python3-gst-1.0 python3-gi
diff --git a/raspicam/README.md b/raspicam/README.md
new file mode 100644
index 0000000..5a011bd
--- /dev/null
+++ b/raspicam/README.md
@@ -0,0 +1,27 @@
+This folder contains some simple camera classification examples specific to the Raspberry
+Pi, using the picamera Python module to access the camera.
+
+If you don't have picamera installed, you can install it with:
+
+```
+pip3 install picamera
+```
+
+Don't forget to enable your camera using raspi-config under "Interfacing Options":
+
+```
+sudo raspi-config
+```
+
+To run the demo, execute the following command, which will use the default
+model ```mobilenet_v2_1.0_224_quant_edgetpu.tflite```:
+
+```
+python3 classify_capture.py
+```
+
+You can change the model and the labels file using flags:
+
+```
+python3 classify_capture.py --model ../canned_models/inception_v3_299_quant_edgetpu.tflite
+```
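If the demo fails to start, it can help to first rule out the camera itself; a minimal picamera smoke test, independent of the EdgeTPU parts:

```python
import time
import picamera

# Shows a five-second preview with a text overlay; requires an enabled,
# connected Raspberry Pi camera.
with picamera.PiCamera() as camera:
    camera.start_preview()
    camera.annotate_text = 'camera OK'
    time.sleep(5)
    camera.stop_preview()
```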
diff --git a/raspicam/classify_capture.py b/raspicam/classify_capture.py
new file mode 100644
index 0000000..5bb8757
--- /dev/null
+++ b/raspicam/classify_capture.py
@@ -0,0 +1,76 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A demo to classify the Raspberry Pi camera stream."""
+import argparse
+import io
+import os
+import time
+from collections import deque
+import numpy as np
+import picamera
+
+import edgetpu.classification.engine
+
+def main():
+    default_model_dir = '../canned_models'
+    default_model = 'mobilenet_v2_1.0_224_quant_edgetpu.tflite'
+    default_labels = 'imagenet_labels.txt'
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--model', help='.tflite model path',
+                        default=os.path.join(default_model_dir, default_model))
+    parser.add_argument('--labels', help='label file path',
+                        default=os.path.join(default_model_dir, default_labels))
+    args = parser.parse_args()
+
+    with open(args.labels, 'r') as f:
+        pairs = (l.strip().split(maxsplit=1) for l in f.readlines())
+        labels = dict((int(k), v) for k, v in pairs)
+
+    engine = edgetpu.classification.engine.ClassificationEngine(args.model)
+
+    with picamera.PiCamera() as camera:
+        camera.resolution = (640, 480)
+        camera.framerate = 30
+        camera.annotate_text_size = 20
+        # The input tensor shape is NHWC: [1, height, width, channels].
+        _, height, width, channels = engine.get_input_tensor_shape()
+        camera.start_preview()
+        try:
+            stream = io.BytesIO()
+            fps = deque(maxlen=20)
+            fps.append(time.time())
+            for _ in camera.capture_continuous(stream,
+                                               format='rgb',
+                                               use_video_port=True,
+                                               resize=(width, height)):
+                stream.truncate()
+                stream.seek(0)
+                input_tensor = np.frombuffer(stream.getvalue(), dtype=np.uint8)
+                start_ms = time.time()
+                results = engine.ClassifyWithInputTensor(input_tensor, top_k=3)
+                inference_ms = (time.time() - start_ms) * 1000.0
+                fps.append(time.time())
+                # Average FPS over the sliding window of frame timestamps.
+                fps_value = (len(fps) - 1) / (fps[-1] - fps[0])
+                camera.annotate_text = 'Inference: %5.2fms FPS: %3.1f' % (inference_ms, fps_value)
+                for result in results:
+                    camera.annotate_text += '\n%.0f%% %s' % (100 * result[1], labels[result[0]])
+                print(camera.annotate_text)
+        finally:
+            camera.stop_preview()
+
+
+if __name__ == '__main__':
+    main()
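One detail worth knowing when adapting ```classify_capture.py```: ```ClassifyWithInputTensor``` expects a flat uint8 array of exactly height * width * channels elements, which is why the capture is resized to the shape reported by ```get_input_tensor_shape()```. A standalone sanity check (paths assume the canned_models layout):

```python
import numpy as np
from edgetpu.classification.engine import ClassificationEngine

engine = ClassificationEngine(
    '../canned_models/mobilenet_v2_1.0_224_quant_edgetpu.tflite')
# Input tensor shape is [1, height, width, channels] (NHWC).
_, height, width, channels = engine.get_input_tensor_shape()
print('model expects %d x %d x %d input' % (width, height, channels))

# A dummy frame of the right size should classify without raising.
dummy = np.zeros(width * height * channels, dtype=np.uint8)
print(engine.ClassifyWithInputTensor(dummy, top_k=1))
```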