diff --git a/.nojekyll b/.nojekyll
new file mode 100644
index 0000000..e69de29
diff --git a/404.html b/404.html
new file mode 100644
index 0000000..21fb7f3
--- /dev/null
+++ b/404.html
@@ -0,0 +1,822 @@
404 - Not found
\ No newline at end of file
diff --git a/assets/images/favicon.png b/assets/images/favicon.png
new file mode 100644
index 0000000..1cf13b9
Binary files /dev/null and b/assets/images/favicon.png differ
diff --git a/assets/javascripts/bundle.5cf534bf.min.js b/assets/javascripts/bundle.5cf534bf.min.js
new file mode 100644
index 0000000..eb0280e
--- /dev/null
+++ b/assets/javascripts/bundle.5cf534bf.min.js
@@ -0,0 +1,29 @@
CSI camera on Raspberry Pi (legacy)

CSI Camera V2 (photo, as of Sat 23 Mar 08:47:12 UTC 2024)

Example for older operating systems (those that still ship the raspistill command):

  • copy csi-legacy.dist as .env if you want to use a Raspberry Pi camera
  • in the copied .env file, replace token-change-me with the value of the token you copied
  • in the copied .env file, replace fingerprint-change-me with some random alphanumeric value of at least 16 characters (40 max), for example fingerprint-myprinter-camera-1
  • save the edited .env file

Next, test the config.

Real world scenario

An older Rpi 3 running an older Debian release with a basic camera:

PRINTER_ADDRESS=127.0.0.1
PRUSA_CONNECT_CAMERA_TOKEN=token-change-me
PRUSA_CONNECT_CAMERA_FINGERPRINT=trash-cam-night-video-wide-1
CAMERA_DEVICE=/dev/video0
CAMERA_COMMAND=raspistill
CAMERA_COMMAND_EXTRA_PARAMS="--nopreview --mode 640:480 -o"
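To sanity check the camera before the script takes over, you can run the capture by hand. The script invokes the configured command as <command> <extra-params> <output_file>, so the manual equivalent of the config above looks roughly like this sketch (the /dev/shm/test.jpg output path is just an example):

# manual test capture, same pattern the script uses: command, extra params, output file
raspistill --nopreview --mode 640:480 -o /dev/shm/test.jpg
ls -lh /dev/shm/test.jpg   # a non-empty JPEG should appear here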
\ No newline at end of file
diff --git a/config.for.camera.csi.libcamera/index.html b/config.for.camera.csi.libcamera/index.html
new file mode 100644
index 0000000..c8e9393
--- /dev/null
+++ b/config.for.camera.csi.libcamera/index.html
@@ -0,0 +1,981 @@
CSI camera on Raspberry Pi

CSI Camera V2 (photo, as of Sat 23 Mar 08:47:12 UTC 2024)

Example for newer operating systems (those that ship libcamera and the rpicam-still command):

  • copy csi.dist as .env if you want to use a Raspberry Pi camera
  • in the copied .env file, replace token-change-me with the value of the token you copied
  • in the copied .env file, replace fingerprint-change-me with some random alphanumeric value of at least 16 characters (40 max), for example fingerprint-myprinter-camera-1
  • save the edited .env file

Next, test the config.

Real example

My Rpi Zero W with a Raspberry Pi Camera v2, using the maximum available resolution:

PRINTER_ADDRESS=192.168.1.25
PRUSA_CONNECT_CAMERA_TOKEN=redacted
PRUSA_CONNECT_CAMERA_FINGERPRINT=c10eb887-f107-41a4-900e-2c38ea12a11c
CAMERA_DEVICE=/dev/video0
CAMERA_COMMAND=rpicam-still
CAMERA_COMMAND_EXTRA_PARAMS="--immediate --nopreview --mode 2592:1944:12:P --lores-width 0 --lores-height 0 --thumb none -o"
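The same kind of manual check works here; the parameters below are taken straight from the example config above, and the output path is only an illustration.

# manual test capture with the parameters from the config above
rpicam-still --immediate --nopreview --mode 2592:1944:12:P --lores-width 0 --lores-height 0 --thumb none -o /dev/shm/test.jpg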
\ No newline at end of file
diff --git a/config.for.camera.esphome.snapshot/index.html b/config.for.camera.esphome.snapshot/index.html
new file mode 100644
index 0000000..d284e4e
--- /dev/null
+++ b/config.for.camera.esphome.snapshot/index.html
@@ -0,0 +1,1025 @@
ESPHome camera snapshot

esp32-wrover-dev (photo)

With an esphome camera that exposes a snapshot endpoint we can use the ultimate power of the curl command to fetch the image from the camera.

Prepare esphome device

Configure the esphome device:

  • install esphome on the device and add the esp32_camera and esp32_camera_web_server components, the latter in snapshot mode:

esp32_camera:
... (omitted, the options differ between camera modules)

esp32_camera_web_server:
  - port: 8081
    mode: snapshot

Flash the device and wait until it boots and is available.

Create config for script

  • copy esphome-snapshot.dist as .env
  • in the copied .env file, replace token-change-me with the value of the token you copied
  • in the copied .env file, replace fingerprint-change-me with some random alphanumeric value of at least 16 characters (40 max), for example fingerprint-myprinter3-camera-3
  • in the copied .env file, put your esphome device address and port into CAMERA_COMMAND_EXTRA_PARAMS
  • save the edited .env file

Next, test the config.

Real world example

I have an esp32-wrover-dev board with a camera, esphome and the camera web UI exposing a snapshot frame on port 8081.

We can use curl to fetch it.

PRINTER_ADDRESS=127.0.0.1
PRUSA_CONNECT_CAMERA_TOKEN=redacted
PRUSA_CONNECT_CAMERA_FINGERPRINT=06f47777-f179-4025-bd80-9e4cb8db2aed
CAMERA_DEVICE=/dev/null
CAMERA_COMMAND=curl
CAMERA_COMMAND_EXTRA_PARAMS=http://esp32-wrover-0461c8.local:8081/ -o
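Before pointing the script at the device, you can check that the snapshot endpoint answers. The hostname below is the one from my example config, so substitute your own device address; the output path is arbitrary.

# fetch one snapshot by hand and confirm it is a JPEG
curl -o /dev/shm/esp-test.jpg http://esp32-wrover-0461c8.local:8081/
file /dev/shm/esp-test.jpg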
\ No newline at end of file
diff --git a/config.for.camera.esphome.stream/index.html b/config.for.camera.esphome.stream/index.html
new file mode 100644
index 0000000..7938e99
--- /dev/null
+++ b/config.for.camera.esphome.stream/index.html
@@ -0,0 +1,1028 @@
ESPHome camera stream

esp32-wrover-dev (photo)

With an esphome camera stream we can use ffmpeg to fetch an image from the camera stream. It requires a bit more computing power from the ESP device and from the host that runs the image processing.

Note that this is not the recommended way, due to the amount of resources it consumes.

Prepare esphome device

Configure the esphome device:

  • install esphome on the device and add the esp32_camera and esp32_camera_web_server components, the latter in stream mode:

esp32_camera:
... (omitted, the options differ between camera modules)

esp32_camera_web_server:
  - port: 8080
    mode: stream

Flash the device and wait until it boots and is available.

Create config for script

  • copy esphome-stream.dist as .env
  • in the copied .env file, replace token-change-me with the value of the token you copied
  • in the copied .env file, replace fingerprint-change-me with some random alphanumeric value of at least 16 characters (40 max), for example fingerprint-myprinter3-camera-3
  • in the copied .env file, put your esphome device address and port into CAMERA_COMMAND_EXTRA_PARAMS
  • note that -update 1 may not be needed with certain ffmpeg versions
  • save the edited .env file

Next, test the config.

Real world example

The same ESP device, this time using the stream; notice the different port (8080).

PRINTER_ADDRESS=127.0.0.1
PRUSA_CONNECT_CAMERA_TOKEN=token-change-me
PRUSA_CONNECT_CAMERA_FINGERPRINT=f68336b-8dab-42cd-8729-6abd8855ff63
CAMERA_DEVICE=/dev/null
CAMERA_COMMAND=ffmpeg
CAMERA_COMMAND_EXTRA_PARAMS="-y -i 'http://esp32-wrover-0461c8.local:8080/' -vframes 1 -q:v 1 -f image2 -update 1 "
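You can reproduce the script's capture step by hand to verify the stream is usable: take the CAMERA_COMMAND and extra params from the config above and append an output file. The hostname is from my example and the output path is arbitrary.

# grab a single frame from the stream, same parameters as the config above
ffmpeg -y -i 'http://esp32-wrover-0461c8.local:8080/' -vframes 1 -q:v 1 -f image2 -update 1 /dev/shm/esp-stream-test.jpg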
\ No newline at end of file
diff --git a/config.for.camera.mjpg/index.html b/config.for.camera.mjpg/index.html
new file mode 100644
index 0000000..35d0611
--- /dev/null
+++ b/config.for.camera.mjpg/index.html
@@ -0,0 +1,996 @@
Web Cam - MJPG stream

This processing requires the ffmpeg package.

Most standalone webcams are actually MJPG cams: they send an endless Motion JPEG stream on a specific URL.

The best place to find that URL is the camera manual; alternatively, open the camera's web UI, right click on the stream image and select Inspect to see the URL of the image, then copy that URL.

You should be able to test the stream locally with the ffplay command.

For example, if your camera is reachable at address 192.168.0.20 on port 8000 under the endpoint /ipcam/mjpeg.cgi, then the command below should show the stream:

ffplay http://192.168.0.20:8000/ipcam/mjpeg.cgi

The URL may also contain a username and password.

If that works, then configuration should be pretty straightforward:

  • copy ffmpeg-mjpg-stream.dist as .env
  • in the copied .env file, replace token-change-me with the value of the token you copied
  • in the copied .env file, replace fingerprint-change-me with some random alphanumeric value of at least 16 characters (40 max), for example fingerprint-myprinter4-camera-4
  • in the copied .env file, replace the camera address (the raspberry-pi placeholder), port and stream path in CAMERA_COMMAND_EXTRA_PARAMS if needed
  • save the edited .env file

Next, test the config.

Unverified example

Beagle Camera stream - if I remember correctly, the camera URL for the stream is something like http://192.168.2.92/ipcam/mjpeg.cgi

Replace 192.168.2.92 with your address in the example below.

PRINTER_ADDRESS=127.0.0.1
PRUSA_CONNECT_CAMERA_TOKEN=token-change-me
PRUSA_CONNECT_CAMERA_FINGERPRINT=fingerprint-change-me
CAMERA_DEVICE=/dev/null
CAMERA_COMMAND=ffmpeg
CAMERA_COMMAND_EXTRA_PARAMS="-y -i 'http://192.168.2.92/ipcam/mjpeg.cgi' -vframes 1 -q:v 1 -f image2 -update 1 "
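If ffplay shows the stream, you can also test the exact single-frame grab the script will perform, i.e. the command and extra params from the config above with an output file appended. The URL is the unverified example one, so use your camera's address.

# grab one frame from the MJPG stream by hand
ffmpeg -y -i 'http://192.168.2.92/ipcam/mjpeg.cgi' -vframes 1 -q:v 1 -f image2 -update 1 /dev/shm/mjpg-test.jpg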
But it is better to use a snapshot instead of a stream if one is available; see the Web Cam - snapshot page.
\ No newline at end of file
diff --git a/config.for.camera.rtsp/index.html b/config.for.camera.rtsp/index.html
new file mode 100644
index 0000000..ebccf3c
--- /dev/null
+++ b/config.for.camera.rtsp/index.html
@@ -0,0 +1,1024 @@
Web Cam - RTSP stream

Caution

DO NOT use VLC to test streams; unfortunately there are problems with it. Please use ffplay from the ffmpeg package.

You have a few options, such as a TCP or a UDP stream. This should work with any other camera as well (usually there is a different port per stream).

You should be able to test the stream locally with the ffplay command.

For example, if your camera is reachable at address 192.168.0.20 on port 8000 under the endpoint /stream, then the command below should show the stream:

ffplay rtsp://192.168.0.20:8000/stream

If that works, then configuration should be pretty straightforward:

  • copy ffmpeg-mediamtx-rtsp-tcp.dist as .env
  • in the copied .env file, replace token-change-me with the value of the token you copied
  • in the copied .env file, replace fingerprint-change-me with some random alphanumeric value of at least 16 characters (40 max), for example fingerprint-myprinter4-camera-4
  • in the copied .env file, replace the RTSP device address (the raspberry-pi placeholder), port and stream id in CAMERA_COMMAND_EXTRA_PARAMS if needed
  • save the edited .env file

You can try with UDP, but you may not get an image ;-)

Next, test the config.

Real world example

Another of my Rpi Zero W boards, named hormex, has two cameras:

  • CSI
  • endoscope on /dev/video

and I'm running a mediamtx server to convert those into RTSP streams. More about mediamtx is here.

So I can have two configs:

.stream-csi over UDP:

PRINTER_ADDRESS=127.0.0.1
PRUSA_CONNECT_CAMERA_TOKEN=redacted
PRUSA_CONNECT_CAMERA_FINGERPRINT=62e8ab72-9766-4ad5-b8b1-174d389fc0d3
CAMERA_DEVICE=/dev/null
CAMERA_COMMAND=ffmpeg
CAMERA_COMMAND_EXTRA_PARAMS="-loglevel error -y -rtsp_transport udp -i "rtsp://hormex:8554/cam" -f image2 -vframes 1 -pix_fmt yuvj420p "

.stream-endo over TCP:

PRINTER_ADDRESS=127.0.0.1
PRUSA_CONNECT_CAMERA_TOKEN=redacted
PRUSA_CONNECT_CAMERA_FINGERPRINT=01a67af8-86a3-45c7-b6e2-39e9d086b367
CAMERA_DEVICE=/dev/null
CAMERA_COMMAND=ffmpeg
CAMERA_COMMAND_EXTRA_PARAMS="-loglevel error -y -rtsp_transport tcp -i "rtsp://hormex:8554/endoscope" -f image2 -vframes 1 -pix_fmt yuvj420p "
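Either stream can be checked with a one-off frame grab run by hand, mirroring what the script will execute; the host and path below come from my example configs and the output path is arbitrary.

# single frame over TCP from the endoscope stream in the example above
ffmpeg -loglevel error -y -rtsp_transport tcp -i rtsp://hormex:8554/endoscope -f image2 -vframes 1 -pix_fmt yuvj420p /dev/shm/rtsp-test.jpg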
\ No newline at end of file
diff --git a/config.for.camera.snapshot/index.html b/config.for.camera.snapshot/index.html
new file mode 100644
index 0000000..6a6d926
--- /dev/null
+++ b/config.for.camera.snapshot/index.html
@@ -0,0 +1,1060 @@
Web Cam - snapshot

Some cameras expose a single-image snapshot under a specific URL. We can use the ultimate power of the curl command to fetch the image from the camera.

This is the preferred way to use web cams, because right now Prusa Connect does not support streams, so there is no point in wasting CPU on them.

The best place to find that URL is the camera manual; alternatively, open the camera's web UI, right click on the still image and select Inspect to see the URL of the image, then copy that URL.

You should be able to test the snapshot URL locally with the curl command.

For example, if your camera is reachable at address 192.168.0.20 on port 8001 under the endpoint /snap.jpg, then a command like the one below should fetch the image:

curl -vvv http://another-cam.local:8081/snap.jpg -o snap.jpg

If you see something like Content-Type: image/jpeg in the output, then you are good - see snap.jpg in the folder where you executed the command.

Create config for script

  • copy snapshot.dist as .env
  • in the copied .env file, replace token-change-me with the value of the token you copied
  • in the copied .env file, replace fingerprint-change-me with some random alphanumeric value of at least 16 characters (40 max), for example fingerprint-myprinter3-camera-3
  • in the copied .env file, put your camera address and port into CAMERA_COMMAND_EXTRA_PARAMS
  • save the edited .env file

Next, test the config.

Real world example

esp32 with esphome

For more in-depth details see esphome snapshot.

I have an esp32-wrover-dev board with a camera, esphome and the camera web UI exposing a snapshot frame on port 8081.

We can use curl to fetch it.

PRINTER_ADDRESS=127.0.0.1
PRUSA_CONNECT_CAMERA_TOKEN=redacted
PRUSA_CONNECT_CAMERA_FINGERPRINT=06f47777-f179-4025-bd80-9e4cb8db2aed
CAMERA_DEVICE=/dev/null
CAMERA_COMMAND=curl
CAMERA_COMMAND_EXTRA_PARAMS=http://esp32-wrover-0461c8.local:8081/ -o

Beagle Camera

This is not tested; I do not own such a camera, so it is hard to tell whether this is right.

The camera URL for the snapshot is http://192.168.2.92/images/snapshot0.jpg, so the config should look like below:

PRINTER_ADDRESS=127.0.0.1
PRUSA_CONNECT_CAMERA_TOKEN=redacted
PRUSA_CONNECT_CAMERA_FINGERPRINT=06f47777-f179-4025-bd80-9e4cb8db2aed
CAMERA_DEVICE=/dev/null
CAMERA_COMMAND=curl
CAMERA_COMMAND_EXTRA_PARAMS=http://192.168.2.92/images/snapshot0.jpg -o
\ No newline at end of file
diff --git a/config.for.camera.usb/index.html b/config.for.camera.usb/index.html
new file mode 100644
index 0000000..a749cca
--- /dev/null
+++ b/config.for.camera.usb/index.html
@@ -0,0 +1,1033 @@
USB camera

USB camera (photo)

This should work on any Linux distro with any sane camera you have.

How to get info about which cameras are available?

Run v4l2-ctl --list-devices.

This should show the list of devices to use, where /dev/video0 is a device name.

Notice that not every device is an actual camera.

How to get what modes are available for the camera?

The quick all-in-one output for camera /dev/video0 is:

v4l2-ctl -d /dev/video0 --all

For more details about the supported formats it is better to use v4l2-ctl --list-formats-ext -d /dev/video0

Prepare config

  • copy usb.dist as .env
  • in the copied .env file, replace token-change-me with the value of the token you copied
  • in the copied .env file, replace fingerprint-change-me with some random alphanumeric value of at least 16 characters (40 max), for example fingerprint-myprinter2-camera-2
  • in the copied .env file, replace /dev/video0 in CAMERA_DEVICE with the desired device
  • save the edited .env file

Next, test the config.

Real world example

Raspberry Pi Zero W with an endoscope camera over USB, registered as /dev/video1:

PRINTER_ADDRESS=192.168.1.25
PRUSA_CONNECT_CAMERA_TOKEN=redacted
PRUSA_CONNECT_CAMERA_FINGERPRINT=7054ba85-bc19-4eb9-badc-6129575d9651
CAMERA_DEVICE=/dev/video1
CAMERA_COMMAND=fswebcam
CAMERA_COMMAND_EXTRA_PARAMS="--resolution 1280x960 --no-banner"
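To confirm the capture works outside the script, fswebcam can be run by hand with the same device and parameters; the -d flag is simply the manual way of passing the device the script takes from CAMERA_DEVICE, and the output path is just an example.

# manual capture with the device and parameters from the config above
fswebcam -d /dev/video1 --resolution 1280x960 --no-banner /dev/shm/usb-test.jpg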
\ No newline at end of file
diff --git a/config.for.camera/index.html b/config.for.camera/index.html
new file mode 100644
index 0000000..4cd35e8
--- /dev/null
+++ b/config.for.camera/index.html
@@ -0,0 +1,1111 @@
Create config for prusa-connect-camera-script env vars

Prusa Camera Token

PRUSA_CONNECT_CAMERA_TOKEN should be taken from the earlier step.

Fingerprint

PRUSA_CONNECT_CAMERA_FINGERPRINT should be unique and set only once per camera.

A fingerprint can be easily generated using the command:

uuidgen

or via an online generator; just copy/paste the output as the fingerprint value into the config.

Do not change the fingerprint after launching the script - the camera is registered with it, so you would have to revert the change, or delete and re-add the camera and start from scratch.
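As a small sketch, the fingerprint can be generated and dropped into a freshly copied .env in one go; this assumes GNU sed and that the copied dist file still contains the fingerprint-change-me placeholder.

# generate a fingerprint and substitute the placeholder in .env
FINGERPRINT=$(uuidgen)
sed -i "s/fingerprint-change-me/${FINGERPRINT}/" .env
grep PRUSA_CONNECT_CAMERA_FINGERPRINT .env   # verify the value landed in the file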
Example devices

The other env vars are set depending on the camera device we want to use.

Locally connected

  • CSI
  • USB

Web cams

Generic

  • Snapshot
  • MJPG streams
  • RTSP streams

Specific example

  • ESPHome Snapshot
  • ESPHome Stream

Next

Next, test the config.
+
+ + + Last update: + 2024-04-30 21:14:06 +0200 (UTC) + + + +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
\ No newline at end of file
diff --git a/configuration.env.full/index.html b/configuration.env.full/index.html
new file mode 100644
index 0000000..280dbc3
--- /dev/null
+++ b/configuration.env.full/index.html
@@ -0,0 +1,998 @@
Configuration Env Vars

+

Config for camera is to the script as environment variables (env vars).

+
    +
  • +

    SLEEP - sleep time in seconds between image captures, + notice that PrusaConnect accepts images at most every 10s or slower. + Default value 10.

    +
  • +
  • +

    PRINTER_ADDRESS - Printer address to ping, if address is unreachable there + is no point in sending an image. Set to 127.0.0.1 to always send images. + Set to empty value to disable ping check and always send images. + Default value 127.0.0.1

    +
  • +
  • +

    PRUSA_CONNECT_CAMERA_TOKEN - required, PrusaConnect API key

    +
  • +
  • +

    PRUSA_CONNECT_CAMERA_FINGERPRINT - required, PrusaConnect camera fingerprint, + use for example cli uuidgen or web + to generate it, it must be at least 16 alphanumeric chars, 40 max. + Remember not to change this if it was already set, otherwise you need to + remove and add the camera again.

    +
  • +
  • +

    CAMERA_DEVICE - camera device to use, if you use Raspberry Pi camera + attached to the CSI via camera ribbon then leave as is + Default /dev/video0 which points to first detected camera.

    +
  • +
  • +

    CAMERA_SETUP_COMMAND - camera setup command and params executed before + taking image, default value is empty, because some cameras do not support it, + in general you want to use something like v4l2-ctl parameters, so + so for example + setup_command=v4l2-ctl --set-ctrl brightness=10,gamma=120 -d $CAMERA_DEVICE + will translate to: + v4l2-ctl --set-ctrl brightness=10,gamma=120 -d /dev/video0

    +
  • +
  • +

    CAMERA_COMMAND - command used to invoke image capture, + default is rpicam-still + available options:

    +
  • +
  • rpicam-still - using CSI camera + modern Raspberry Pi operating systems since + Debian 11 Bullseye
  • +
  • raspistill - using CSI camera + older Raspberry Pi operating systems
  • +
  • fswebcam - using USB camera + custom package 'fswebcam'
  • +
  • +

    anything else will be processed directly, so for example you could use + 'ffmpeg' in here

    +
  • +
  • +

    CAMERA_COMMAND_EXTRA_PARAMS - extra params passed to the camera program, passed directly as <command> <extra-params> <output_file>; example values per specific camera:

    +
  • +
  • +

    libcamera (rpicam-still) + --immediate --nopreview --mode 2592:1944:12:P --lores-width 0 --lores-height 0 --thumb none -o

    +
  • +
  • raspistill + --nopreview --mode 2592:1944:12:P -o
  • +
  • fswebcam + --resolution 1280x960 --no-banner
  • +
  • ffmpeg, in this case CAMERA_DEVICE is ignored, use it directly in the extra params + -f v4l2 -y -i /dev/video0 -f image2 -vframes 1 -pix_fmt yuvj420p
  • +
+ + +
    +
  • +

    TARGET_DIR - directory where camera images are saved; the image per camera is overwritten on each capture. Default value /dev/shm so that we do not write to microSD cards or read-only filesystems/containers. /dev/shm is a shared memory space; if you have more printers you may need to increase its size at the system level.

    +
  • +
  • +

    CURL_EXTRA_PARAMS - extra params passed to curl when pushing an image; default is an empty value, but you could for example add additional params if needed, such as -k if using a TLS proxy with a self-signed certificate

    +
  • +
  • +

    PRUSA_CONNECT_URL - Prusa Connect endpoint where to post images, + default value https://webcam.connect.prusa3d.com/c/snapshot + You could put here Prusa Connect Proxy if you use one.

    +
  • +
+

For more in-depth details (no need to repeat them here) please see the top of prusa-connect-camera.sh.
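To illustrate how the variables above fit together, below is a hypothetical .env sketch for a USB camera; it combines the defaults listed above with the fswebcam example values, and the token/fingerprint entries are placeholders, not real credentials:

SLEEP=10
PRINTER_ADDRESS=127.0.0.1
PRUSA_CONNECT_CAMERA_TOKEN=token-change-me
PRUSA_CONNECT_CAMERA_FINGERPRINT=fingerprint-myprinter2-camera-2
CAMERA_DEVICE=/dev/video0
CAMERA_SETUP_COMMAND=""
CAMERA_COMMAND=fswebcam
CAMERA_COMMAND_EXTRA_PARAMS="--resolution 1280x960 --no-banner"
TARGET_DIR=/dev/shm
CURL_EXTRA_PARAMS=""
PRUSA_CONNECT_URL=https://webcam.connect.prusa3d.com/c/snapshot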

+ +
+
+ + + Last update: + 2024-04-30 21:14:06 +0200 (UTC) + + + +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/configuration.env/index.html b/configuration.env/index.html new file mode 100644 index 0000000..37991f6 --- /dev/null +++ b/configuration.env/index.html @@ -0,0 +1,966 @@ + + + + + + + + + + + + + + + + + + + + + + + + Env vars - Prusa Connect Camera Script + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + +

Configuration Env Vars

+

Minimum required env vars

+

Config for the camera is passed to the script as environment variables (env vars).

+

The most important env vars are:

+
    +
  • PRUSA_CONNECT_CAMERA_TOKEN
  • +
  • PRUSA_CONNECT_CAMERA_FINGERPRINT
  • +
  • CAMERA_COMMAND
  • +
  • CAMERA_COMMAND_EXTRA_PARAMS
  • +
+

Those env vars will be filled in during the next steps.
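For orientation, a minimal sketch of what those four variables could look like once filled in (the values below are placeholders borrowed from examples elsewhere in this documentation, not working credentials):

PRUSA_CONNECT_CAMERA_TOKEN=token-change-me
PRUSA_CONNECT_CAMERA_FINGERPRINT=fingerprint-myprinter-camera-1
CAMERA_COMMAND=fswebcam
CAMERA_COMMAND_EXTRA_PARAMS="--resolution 1280x960 --no-banner"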

+

The full list of env vars can be seen here

+ +
+
+ + + Last update: + 2024-04-30 21:14:06 +0200 (UTC) + + + +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/configuration.overview/index.html b/configuration.overview/index.html new file mode 100644 index 0000000..8803580 --- /dev/null +++ b/configuration.overview/index.html @@ -0,0 +1,920 @@ + + + + + + + + + + + + + + + + + + + + + + + + Overview - Prusa Connect Camera Script + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + +

Configuration Overview

+

Short overview of actions:

+
    +
  • ensure printer is up and running and sending status to Prusa Connect + (otherwise images will be discarded)
  • +
  • add new camera to the existing printer in Prusa Connect, + obtain token and generate fingerprint
  • +
  • create config for prusa-connect-camera-script env vars
  • +
  • test the config
  • +
  • install script as systemd service
  • +
  • tuning config
  • +
+ +
+
+ + + Last update: + 2024-04-30 21:14:06 +0200 (UTC) + + + +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/configuration.tuning/index.html b/configuration.tuning/index.html new file mode 100644 index 0000000..da40fbc --- /dev/null +++ b/configuration.tuning/index.html @@ -0,0 +1,1226 @@ + + + + + + + + + + + + + + + + + + + + + + + + Configuration Tuning - Prusa Connect Camera Script + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + +

Configuration tuning

+

Assuming you already have a working camera with basic setup, we can tune it further.

+

The steps below depend on the camera's capabilities, thus your mileage may vary.

+

Notice that Prusa Connect has an upload file size limit of roughly 8 MB per image, so there may be no point in capturing images at extremely high resolutions.
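A quick way to check how large the captured image actually is, assuming the default TARGET_DIR of /dev/shm and the camera_<fingerprint>.jpg naming used by the script:

ls -lh /dev/shm/camera_*.jpg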

+

Getting higher quality camera images

+

Use v4l2-ctl to get the list of available resolutions that the camera provides and then update it in the env var configs.
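If you are not sure which device nodes are cameras, you can list them first (from the v4l-utils package):

v4l2-ctl --list-devices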

+

Run v4l2-ctl --list-formats-ext -d /dev/video0, where /dev/video0 is a device listed by the command above.

+

Example output:

+
v4l2-ctl --list-formats-ext -d /dev/video1
+ioctl: VIDIOC_ENUM_FMT
+  Type: Video Capture
+
+  [0]: 'MJPG' (Motion-JPEG, compressed)
+    Size: Discrete 640x480
+      Interval: Discrete 0.033s (30.000 fps)
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 640x360
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 352x288
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 320x240
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 176x144
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 160x120
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 800x600
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 1280x720
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 1280x960
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 640x480
+      Interval: Discrete 0.033s (30.000 fps)
+      Interval: Discrete 0.033s (30.000 fps)
+  [1]: 'YUYV' (YUYV 4:2:2)
+    Size: Discrete 640x480
+      Interval: Discrete 0.033s (30.000 fps)
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 640x360
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 352x288
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 320x240
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 176x144
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 160x120
+      Interval: Discrete 0.033s (30.000 fps)
+    Size: Discrete 800x600
+      Interval: Discrete 0.200s (5.000 fps)
+    Size: Discrete 1280x720
+      Interval: Discrete 0.200s (5.000 fps)
+    Size: Discrete 1280x960
+      Interval: Discrete 0.200s (5.000 fps)
+    Size: Discrete 640x480
+      Interval: Discrete 0.033s (30.000 fps)
+      Interval: Discrete 0.033s (30.000 fps)
+
+

As you can see, if I set the video to YUYV with a resolution higher than 800x600 I would get only 5 frames per second. For still images this is not a problem, but for video streaming that could be too low and I would have to switch to MJPG (or actually mjpeg in ffmpeg).
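If you do end up grabbing frames with ffmpeg, the MJPG format can be requested on the input side; a sketch using the same -f v4l2 -pix_fmt mjpeg input options shown later on this page, where /dev/video1 and the output path are assumptions to adjust to your setup:

# grab a single MJPG test frame from the camera and write it to shared memory
ffmpeg -y -f v4l2 -pix_fmt mjpeg -video_size 1280x960 -framerate 30 -i /dev/video1 -vframes 1 -q:v 1 -f image2 /dev/shm/test.jpg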

+

For Raspberry Cam v2 you could use csi.dist as source and add +--mode 2592:1944:12:P to the CAMERA_COMMAND_EXTRA_PARAMS.

+

For certain USB cameras (such as Tracer Endoscope) you should use usb.dist and +you should be able to add --resolution 1280x960 to the CAMERA_COMMAND_EXTRA_PARAMS.

+

Setting up video camera controls

+

Video controls are things like brightness, auto white balance (awb), +exposure and so on.

+

Get device capabilities, especially User controls:

+
v4l2-ctl -d /dev/video0 -l
+
+

and set the parameters you want accordingly in the CAMERA_SETUP_COMMAND env var, for example:

+
CAMERA_SETUP_COMMAND="v4l2-ctl --set-ctrl brightness=64,gamma=300 -d $CAMERA_DEVICE"
+
+

Remember to restart the given camera service.
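For example, assuming the script was installed as the systemd unit described later in this documentation and the camera uses a hypothetical .usb1 config:

sudo systemctl restart prusa-connect-camera@usb1.service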

+

You can try to use the guvcview desktop application to check params in realtime.

+

Image flip and rotation

+

You can pass params to rpicam-still or fswebcam as you want.

+

rpicam-still

+

See rpicam-still --help

+
  --hflip      Read out with horizontal mirror
+  --vflip      Read out with vertical flip
+  --rotation   Use hflip and vflip to create the given rotation <angle>
+
+

so for example:

+
CAMERA_COMMAND=rpicam-still
+CAMERA_COMMAND_EXTRA_PARAMS="--rotation 90 --immediate --nopreview --thumb none -o"
+
+

fswebcam

+

See fswebcam --help

+
  --flip <direction>       Flips the image. (h, v)
+  --crop <size>[,<offset>] Crop a part of the image.
+  --scale <size>           Scales the image.
+  --rotate <angle>         Rotates the image in right angles.
+
+

so for example:

+
CAMERA_COMMAND=fswebcam
+CAMERA_COMMAND_EXTRA_PARAMS="--flip v --resolution 640x480 --no-banner"
+
+

ffmpeg

+

When curl is not enough and you don't really want to physically rotate your camera, use ffmpeg for post-processing. You can process static images with it, load v4l2 devices... whatever.

+

With ffmpeg you can do interesting things with filters, it will just require +more computing power.

+

Adding v4l2 options

+

v4l2 can be used as an alias for video4linux2.

+

You can pass video4linux options to ffmpeg on device initialization, for example:

+ +
ffmpeg -f v4l2 -pix_fmt mjpeg -video_size 1280x960 -framerate 30 -i /dev/video1 \
+  -c:v libx264 -preset ultrafast -b:v 6000k -f rtsp rtsp://localhost:$RTSP_PORT/$MTX_PATH
+
+ + +

would instruct ffmpeg to use video4linux, force it to talk to the camera under /dev/video1, and force the mjpeg encoder, resolution and framerate.

+

The command above is taken directly from mediamtx.

+

For more params, see official ffmpeg docs. +Just remember to pass them before defining input (-i /dev/video1).

+

Rotation

+

See here +for basic ones.

+

You probably want to use -vf "transpose=1" to rotate image 90 degrees clockwise:

+ +
CAMERA_COMMAND=ffmpeg
+CAMERA_COMMAND_EXTRA_PARAMS="-y -i 'http://esp32-wrover-0461c8.local:8080/' -vf 'transpose=1' -vframes 1 -q:v 1 -f image2 -update 1 "
+
+ + +

Other processing

+

Frankly speaking you can do anything you want with ffmpeg, for example

+

-vf transpose=1,shufflepixels=m=block:height=16:width=16

+

Why? why not :D

+ +
+
+ + + Last update: + 2024-04-30 21:14:06 +0200 (UTC) + + + +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/css/extra.css b/css/extra.css new file mode 100644 index 0000000..7f4bc84 --- /dev/null +++ b/css/extra.css @@ -0,0 +1,3 @@ +.wy-nav-content { + max-width: 100%; +} diff --git a/index.html b/index.html new file mode 100644 index 0000000..1b4887c --- /dev/null +++ b/index.html @@ -0,0 +1,510 @@ + + + + + + + + + + + +Prusa Connect Camera Script + + + + + + + + + + + + + +
+
+
+ +
+
+
+
+
+
+
+ +
+
+
+
+
+
+ +
+
+
+
+
+

Welcome to Prusa-Connect-Camera-Script

+

logo

+

This project aims to make it easier to use any camera as a Prusa Connect camera.

+

Features

+
    +
  • allows reading images from CSI cameras, USB cameras, RTSP streams, still images...
  • +
  • does not send pictures if the printer is offline
  • +
  • store data in memory to prevent MicroSD wear out
  • +
  • verbose error messages to see if the image capture works
  • +
  • ability to run multiple cameras in separate instances
  • +
+

Planned

+
    +
  • run in container
  • +
+

Architecture

+
sequenceDiagram + script->>script: initial checks + script->>script: start loop + script->>camera_command: Call camera command + camera_command->>image_on_disk: camera command writes image to disk + image_on_disk->>script: script checks image from disk if exits etc + script->>script: show errors if image_on_disk is missing + script->>curl: run curl to post image to Prusa Connect API (pass image_on_disk) + curl->>image_on_disk: curl reads image from disk + curl->>PrusaConnect: send image to Prusa Connect + PrusaConnect->>curl: return response code / messages + script->>script: sleep + end loop +
+

Known limitations

+
    +
  • this script performs processing of the single camera, if you need more cameras + then just create multiple copies with different settings (see below)
  • +
  • +

    Rpi Zero W or older devices may have CPU limitations when processing remote streams or multiple cameras at once

    +
  • +
  • +

    I was not able to test EVERY setting so this may still have some bugs

    +
  • +
  • Prusa Connect will not show the camera image if the printer is not alive; this is a Prusa Connect limitation.
  • +
  • default settings are quite generic and thus give low camera quality; you need to adjust them, see advanced configuration at the end
  • +
+
+
+ + + Last update: + 2024-04-30 21:14:06 +0200 (UTC) + +
+
+
+
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/installation/index.html b/installation/index.html new file mode 100644 index 0000000..79a7bba --- /dev/null +++ b/installation/index.html @@ -0,0 +1,925 @@ + + + + + + + + + + + + + + + + + + + + + + + + Installation - Prusa Connect Camera Script + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + +

Installation

+

Install system packages - assuming Debian-based distros such as Raspberry Pi OS, which also come with some pre-installed packages.

+

The commands below should be executed in a shell/terminal (on the Raspberry Pi).

+

For most Raspberry Pi Cameras (CSI/USB):

+
sudo apt-get update
+sudo apt-get install -y curl libcamera0 fswebcam git iputils-ping v4l-utils uuid-runtime
+
+

Additional packages for remote cameras - especially the ones that are used for streaming:

+
sudo apt-get install -y ffmpeg
+
+

Download this script:

+
mkdir -p /home/pi/src
+cd /home/pi/src
+git clone https://github.com/nvtkaszpir/prusa-connect-camera-script.git
+cd prusa-connect-camera-script
+
+ +
+
+ + + Last update: + 2024-04-30 21:14:06 +0200 (UTC) + + + +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/performance/index.html b/performance/index.html new file mode 100644 index 0000000..e8ba419 --- /dev/null +++ b/performance/index.html @@ -0,0 +1,925 @@ + + + + + + + + + + + + + + + + + + + + + + + + Performance - Prusa Connect Camera Script + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + +

Performance

+
    +
  • +

    Raspberry Pi Zero W is able to process a CSI camera (Rpi Cam v2) and a USB 2k camera, but it has a load average of about 1.4 and the CPU is quite well utilized, so you may need to decrease the resolution per camera to see how it goes.

    +
  • +
  • +

    for webcams it is always better to choose snapshot because it requires less computing both on the camera and on the host; otherwise we need to use ffmpeg

    +
  • +
  • +

    ffmpeg is usually noticeably slow and CPU intensive, especially if you do more complex operations

    +
  • +
+ +
+
+ + + Last update: + 2024-04-30 21:14:06 +0200 (UTC) + + + +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/prusa.connect/index.html b/prusa.connect/index.html new file mode 100644 index 0000000..91ea0ec --- /dev/null +++ b/prusa.connect/index.html @@ -0,0 +1,917 @@ + + + + + + + + + + + + + + + + + + + + + + + + Add camera to Prusa Connect - Prusa Connect Camera Script + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + +

Create new camera in the Prusa Connect

+
    +
  • go to Prusa Connect and log in
  • +
  • select Printer
  • +
  • select Camera
  • +
  • on the bottom click Add new other camera
  • +
  • new camera is created, copy Token, this is needed later as + PRUSA_CONNECT_CAMERA_TOKEN env var
  • +
+ +
+
+ + + Last update: + 2024-04-30 21:14:06 +0200 (UTC) + + + +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/requirements/index.html b/requirements/index.html new file mode 100644 index 0000000..4ee31d3 --- /dev/null +++ b/requirements/index.html @@ -0,0 +1,1039 @@ + + + + + + + + + + + + + + + + + + + + + + + + Requirements - Prusa Connect Camera Script + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + +

Requirements

+

Hardware

+

Physical host or virtual machine or container:

+
    +
  • probably something like Raspberry Pi Zero W at least, can be without camera
  • +
  • more cameras usually require more compute power
  • +
+

Camera such as:

+ + + +
    +
  • most of USB cameras if they work under Linux
  • +
  • esphome cameras using esp32_camera_web_server with snapshot module
  • +
  • esphome cameras using esp32_camera_web_server with stream module using ffmpeg
  • +
  • probably any camera if using ffmpeg
  • +
+

Software

+

Linux operating system. Debian-based preferred, for example Raspberry Pi OS Lite if you run a Raspberry Pi. I also use a laptop with Ubuntu 22.04, but I believe that with minor tweaks it should work on most distributions (mainly the package names are different).

+

Below list uses Debian package names.

+

Generic system packages

+
    +
  • bash 5.x (what year is it?)
  • +
  • git (just to install scripts from this repo)
  • +
  • curl
  • +
  • iputils-ping
  • +
  • uuid-runtime to make generation of camera fingerprint easier
  • +
+

Optional packages

+
    +
  • v4l-utils - to detect camera capabilities
  • +
  • libcamera0 - for Rpi CSI cameras
  • +
  • libraspberrypi-bin or rpicam-apps-lite for Rpi CSI cameras + (should be already installed on Rpi OS)
  • +
  • fswebcam - for generic USB cameras
  • +
  • ffmpeg - for custom commands for capturing remote streams
  • +
  • you-name-it - for custom commands beyond my imagination
  • +
+ +
+
+ + + Last update: + 2024-04-30 21:14:06 +0200 (UTC) + + + +
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/search/search_index.json b/search/search_index.json new file mode 100644 index 0000000..26f3519 --- /dev/null +++ b/search/search_index.json @@ -0,0 +1 @@ +{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Welcome to Prusa-Connect-Camera-Script","text":"

This project aims to make it easier to use any camera as a Prusa Connect camera.

"},{"location":"#features","title":"Features","text":"
  • allows to read images from CSI cameras, USB cameras, RTSP streams, still images...
  • do not send pictures if the printer is offline
  • store data in memory to prevent MicroSD wear out
  • verbose error messages to see if the image capture works
  • ability to run multiple cameras in separate instances
"},{"location":"#planned","title":"Planned","text":"
  • run in container
"},{"location":"#architecture","title":"Architecture","text":"sequenceDiagram script->>script: initial checks script->>script: start loop script->>camera_command: Call camera command camera_command->>image_on_disk: camera command writes image to disk image_on_disk->>script: script checks image from disk if exits etc script->>script: show errors if image_on_disk is missing script->>curl: run curl to post image to Prusa Connect API (pass image_on_disk) curl->>image_on_disk: curl reads image from disk curl->>PrusaConnect: send image to Prusa Connect PrusaConnect->>curl: return response code / messages script->>script: sleep + end loop"},{"location":"#known-limitations","title":"Known limitations","text":"
  • this script performs processing of the single camera, if you need more cameras then just create multiple copies with different settings (see below)
  • Rpi Zero W or older devices may have CPU limitations to process remote streams or multiple cameras at once

  • I was not able to test EVERY setting so this may still have some bugs

  • Prusa Connect will not show camera image if the printer is not alive, this is Prusa Connect limitation.
  • default settings are quite generic and thus give low camera quality; you need to adjust them, see advanced configuration at the end
"},{"location":"config.for.camera.csi.legacy/","title":"CSI camera on Raspberry Pi (legacy)","text":"

CSI Camera V2 as of Sat 23 Mar 08:47:12 UTC 2024.

Example for older operating systems (those with command raspistill):

  • copy csi-legacy.dist as .env if you want to use Raspberry Pi camera
  • in copied file .env replace token-change-me with the value of the token you copied
  • in copied file .env replace fingerprint-change-me with some random value, which is alphanumeric and has at least 16 chars (and max of 40 chars), for example set it to fingerprint-myprinter-camera-1
  • save edited file .env

Next, test config.

"},{"location":"config.for.camera.csi.legacy/#real-world-scenario","title":"Real world scenario","text":"

Some older Rpi 3 with older Debian with basic cam:

PRINTER_ADDRESS=127.0.0.1\nPRUSA_CONNECT_CAMERA_TOKEN=token-change-me\nPRUSA_CONNECT_CAMERA_FINGERPRINT=trash-cam-night-video-wide-1\nCAMERA_DEVICE=/dev/video0\nCAMERA_COMMAND=raspistill\nCAMERA_COMMAND_EXTRA_PARAMS=\"--nopreview --mode 640:480 -o\"\n
"},{"location":"config.for.camera.csi.libcamera/","title":"CSI camera on Raspberry Pi","text":"

CSI Camera V2 as of Sat 23 Mar 08:47:12 UTC 2024.

Example for newer operating systems (commands libcamera or rpicam-still):

  • copy csi.dist as .env if you want to use Raspberry Pi camera
  • in copied file .env replace token-change-me with the value of the token you copied
  • in copied file .env replace fingerprint-change-me with some random value, which is alphanumeric and has at least 16 chars (and max of 40 chars), for example set it to fingerprint-myprinter-camera-1
  • save edited file .env

Next, test config.

"},{"location":"config.for.camera.csi.libcamera/#real-example","title":"Real example","text":"

My Rpi Zero W with Raspberry Pi Camera v2 with maximum resolution available:

PRINTER_ADDRESS=192.168.1.25\nPRUSA_CONNECT_CAMERA_TOKEN=redacted\nPRUSA_CONNECT_CAMERA_FINGERPRINT=c10eb887-f107-41a4-900e-2c38ea12a11c\nCAMERA_DEVICE=/dev/video0\nCAMERA_COMMAND=rpicam-still\nCAMERA_COMMAND_EXTRA_PARAMS=\"--immediate --nopreview --mode 2592:1944:12:P --lores-width 0 --lores-height 0 --thumb none -o\"\n
"},{"location":"config.for.camera.esphome.snapshot/","title":"ESPHome camera snapshot","text":"

With esphome camera with snapshot we can use the ultimate power of curl command to fetch the image from the camera.

"},{"location":"config.for.camera.esphome.snapshot/#prepare-esphome-device","title":"Prepare esphome device","text":"

Configure esphome device:

  • install esphome camera on the device and add esp32_camera and esp32_camera_web_server with snapshot modules:
esp32_camera:\n... (skipped due to the fact there are different modules)\n\nesp32_camera_web_server:\n  - port: 8081\n    mode: snapshot\n

Flash the device and wait until it boots and is available.

"},{"location":"config.for.camera.esphome.snapshot/#create-config-for-script","title":"Create config for script","text":"
  • copy esphome-snapshot.dist as .env
  • in copied file .env replace token-change-me with the value of the token you copied
  • in copied file .env replace fingerprint-change-me with some random value, which is alphanumeric and has at least 16 chars (and max of 40 chars), for example set it to fingerprint-myprinter3-camera-3
  • in copied file .env replace your esphome device address and port in CAMERA_COMMAND_EXTRA_PARAMS
  • save edited file .env

Next, test config.

"},{"location":"config.for.camera.esphome.snapshot/#real-world-example","title":"Real world example","text":"

I have esp32-wrover-dev board with camera + esphome + web ui for camera exposing snapshot frame on port 8081.

We can use curl to fetch it.

PRINTER_ADDRESS=127.0.0.1\nPRUSA_CONNECT_CAMERA_TOKEN=redacted\nPRUSA_CONNECT_CAMERA_FINGERPRINT=06f47777-f179-4025-bd80-9e4cb8db2aed\nCAMERA_DEVICE=/dev/null\nCAMERA_COMMAND=curl\nCAMERA_COMMAND_EXTRA_PARAMS=http://esp32-wrover-0461c8.local:8081/ -o\n
"},{"location":"config.for.camera.esphome.stream/","title":"ESPHome camera stream","text":"

With esphome camera stream we can use the ffmpeg to fetch the image from the camera stream. It requires a bit more computing power from esp device and the host that runs the image processing.

Notice that this is not the recommended way due to the amount of resources consumed.

"},{"location":"config.for.camera.esphome.stream/#prepare-esphome-device","title":"Prepare esphome device","text":"

Configure esphome device:

  • install esphome camera on the device and add esp32_camera and esp32_camera_web_server with stream modules:
esp32_camera:\n... (skipped due to the fact there are different modules)\n\nesp32_camera_web_server:\n  - port: 8080\n    mode: stream\n

Flash the device and wait until it boots and is available.

"},{"location":"config.for.camera.esphome.stream/#create-config-for-script","title":"Create config for script","text":"
  • copy esphome-stream.dist as .env
  • in copied file .env replace token-change-me with the value of the token you copied
  • in copied file .env replace fingerprint-change-me with some random value, which is alphanumeric and has at least 16 chars (and max of 40 chars), for example set it to fingerprint-myprinter3-camera-3
  • in copied file .env replace your esphome device address and port in CAMERA_COMMAND_EXTRA_PARAMS
  • notice that -update 1 may not be needed in certain ffmpeg versions
  • save edited file .env

Next, test config.

"},{"location":"config.for.camera.esphome.stream/#real-world-example","title":"Real world example","text":"

The same ESP device with stream, notice different port (8080).

PRINTER_ADDRESS=127.0.0.1\nPRUSA_CONNECT_CAMERA_TOKEN=token-change-me\nPRUSA_CONNECT_CAMERA_FINGERPRINT=f68336b-8dab-42cd-8729-6abd8855ff63\nCAMERA_DEVICE=/dev/null\nCAMERA_COMMAND=ffmpeg\nCAMERA_COMMAND_EXTRA_PARAMS=\"-y -i 'http://esp32-wrover-0461c8.local:8080/' -vframes 1 -q:v 1 -f image2 -update 1 \"\n
"},{"location":"config.for.camera/","title":"Create config for prusa-connect-camera-script env vars","text":""},{"location":"config.for.camera/#prusa-camera-token","title":"Prusa Camera Token","text":"

PRUSA_CONNECT_CAMERA_TOKEN should be taken from earlier step.

"},{"location":"config.for.camera/#fingerprint","title":"Fingerprint","text":"

PRUSA_CONNECT_CAMERA_FINGERPRINT should be unique and set only once for each camera.

Fingerprint can be easily generated using command:

uuidgen\n

or via online website, just copy/paste the output as fingerprint value into the config.

Do not change the fingerprint after launching the script - by then the camera is already registered, and you would need to revert the change or delete the camera and add it again, starting from scratch.

"},{"location":"config.for.camera/#example-devices","title":"Example devices","text":"

Other env vars are set depending on the camera device we want to use.

"},{"location":"config.for.camera/#locally-connected","title":"Locally connected","text":"
  • Raspberry Pi CSI camera - libcamera (recommended)
  • Raspberry Pi CSI camera - legacy
  • USB camera
"},{"location":"config.for.camera/#web-cams","title":"Web cams","text":""},{"location":"config.for.camera/#generic","title":"Generic","text":"
  • Snapshot cams (recommended)
  • MJPG streaming cams
  • RTSP streaming cams
"},{"location":"config.for.camera/#specific-example","title":"Specific example","text":"
  • ESPHome via camera snapshot (recommended)
  • ESPHome via camera stream
"},{"location":"config.for.camera/#next","title":"Next","text":"

Next, test config.

"},{"location":"config.for.camera.mjpg/","title":"Web Cam - MJPG stream","text":"

This processing requires ffmpeg package.

Most standalone webcams are actually mjpg cams, they send infinite motion jpeg stream over specific URL.

The best option to check what is the URL is in the camera manual, or if you open web UI of the camera and see the stream image then right click on the image and select Inspect to see the URL for the image - copy that URL.

You should be able to test the stream locally with ffplay command.

For example, if your camera is reachable over address 192.168.0.20 and port 8000 under endpoint /ipcam/mjpeg.cgi then below command should show the stream:

ffplay http://192.168.0.20:8000/ipcam/mjpeg.cgi\n

There may be some user and password in the URL.

If that works, then configuration should be pretty straightforward:

  • copy ffmpeg-mjpg-stream.dist as .env
  • in copied file .env replace token-change-me with the value of the token you copied
  • in copied file .env replace fingerprint-change-me with some random value, which is alphanumeric and has at least 16 chars (and max of 40 chars), for example set it to fingerprint-myprinter4-camera-4
  • in copied file .env replace your RTSP device address raspberry-pi, port and stream id in CAMERA_COMMAND_EXTRA_PARAMS if needed
  • save edited file .env

Next, test config.

"},{"location":"config.for.camera.mjpg/#unverified-example","title":"Unverified example","text":"

Beagle Camera stream - if I remember correctly, then camera url to the stream is something like http://192.168.2.92/ipcam/mjpeg.cgi

Replace 192.168.2.92 with your address in the example below.

PRINTER_ADDRESS=127.0.0.1\nPRUSA_CONNECT_CAMERA_TOKEN=token-change-me\nPRUSA_CONNECT_CAMERA_FINGERPRINT=fingerprint-change-me\nCAMERA_DEVICE=/dev/null\nCAMERA_COMMAND=ffmpeg\nCAMERA_COMMAND_EXTRA_PARAMS=\"-y -i 'http://192.168.2.92/ipcam/mjpeg.cgi' -vframes 1 -q:v 1 -f image2 -update 1 \"\n

But it is better to use a snapshot instead of stream if available, see here.

"},{"location":"config.for.camera.rtsp/","title":"Web Cam - RTSP stream","text":""},{"location":"config.for.camera.rtsp/#caution","title":"Caution","text":"

DO NOT use VLC to test streams, there are unfortunately problems with it. Please use ffplay from ffmpeg package.

You have some options such as TCP or UDP stream (whatever..). This should work with any other camera (usually there is a different port per stream)

You should be able to test the stream locally with ffplay command.

For example, if your camera is reachable over address 192.168.0.20 and port 8000 under endpoint /stream then below command should show the stream:

ffplay rtsp://192.168.0.20:8000/stream\n

If that works, then configuration should be pretty straightforward:

  • copy ffmpeg-mediamtx-rtsp-tcp.dist as .env
  • in copied file .env replace token-change-me with the value of the token you copied
  • in copied file .env replace fingerprint-change-me with some random value, which is alphanumeric and has at least 16 chars (and max of 40 chars), for example set it to fingerprint-myprinter4-camera-4
  • in copied file .env replace your RTSP device address raspberry-pi, port and stream id in CAMERA_COMMAND_EXTRA_PARAMS if needed
  • save edited file .env

You can try with UDP, but you may not get it ;-)

Next, test config.

"},{"location":"config.for.camera.rtsp/#real-world-example","title":"Real world example","text":"

My another Rpi Zero W named hormex has two cameras:

  • CSI
  • endoscope on /dev/video

and I'm running a mediamtx server to convert those to RTSP streams. More about mediamtx is here.

So I can have two configs:

.stream-csi over UDP:

PRINTER_ADDRESS=127.0.0.1\nPRUSA_CONNECT_CAMERA_TOKEN=redacted\nPRUSA_CONNECT_CAMERA_FINGERPRINT=62e8ab72-9766-4ad5-b8b1-174d389fc0d3\nCAMERA_DEVICE=/dev/null\nCAMERA_COMMAND=ffmpeg\nCAMERA_COMMAND_EXTRA_PARAMS=\"-loglevel error -y -rtsp_transport udp -i \"rtsp://hormex:8554/cam\" -f image2 -vframes 1 -pix_fmt yuvj420p \"\n

.stream-endo over TCP:

PRINTER_ADDRESS=127.0.0.1\nPRUSA_CONNECT_CAMERA_TOKEN=redacted\nPRUSA_CONNECT_CAMERA_FINGERPRINT=01a67af8-86a3-45c7-b6e2-39e9d086b367\nCAMERA_DEVICE=/dev/null\nCAMERA_COMMAND=ffmpeg\nCAMERA_COMMAND_EXTRA_PARAMS=\"-loglevel error -y -rtsp_transport tcp -i \"rtsp://hormex:8554/endoscope\" -f image2 -vframes 1 -pix_fmt yuvj420p \"\n
"},{"location":"config.for.camera.snapshot/","title":"Web Cam - snapshot","text":"

Some cameras expose a single image snapshot under a specific URL. We can use the ultimate power of the curl command to fetch the image from the camera.

This is the preferred way to use web cams because right now Prusa Connect does not support streams, and thus there is no point in wasting CPU on that.

The best option to check what is the URL is in the camera manual, or if you open web UI of the camera and see the still image then right click on the image and select Inspect to see the URL for the image - copy that URL.

You should be able to test the stream locally with ffplay command.

For example, if your camera is reachable over address 192.168.0.20 and port 8001 under endpoint /snap.jpg then below command should show the image:

curl -vvv http://another-cam.local:8081/snap.jpg -o snap.jpg\n

then you should see in the output something like Content-Type: image/jpeg, then you are good - see snap.jpg in the folder you executed the command.

"},{"location":"config.for.camera.snapshot/#create-config-for-script","title":"Create config for script","text":"
  • copy snapshot.dist as .env
  • in copied file .env replace token-change-me with the value of the token you copied
  • in copied file .env replace fingerprint-change-me with some random value, which is alphanumeric and has at least 16 chars (and max of 40 chars), for example set it to fingerprint-myprinter3-camera-3
  • in copied file .env replace your esphome device address and port in CAMERA_COMMAND_EXTRA_PARAMS
  • save edited file .env

Next, test config.

"},{"location":"config.for.camera.snapshot/#real-world-example","title":"Real world example","text":""},{"location":"config.for.camera.snapshot/#esp32-with-esphome","title":"esp32 with esphome","text":"

For more in-depth details see esphome snapshot.

I have esp32-wrover-dev board with camera + esphome + web ui for camera exposing snapshot frame on port 8081.

We can use curl to fetch it.

PRINTER_ADDRESS=127.0.0.1\nPRUSA_CONNECT_CAMERA_TOKEN=redacted\nPRUSA_CONNECT_CAMERA_FINGERPRINT=06f47777-f179-4025-bd80-9e4cb8db2aed\nCAMERA_DEVICE=/dev/null\nCAMERA_COMMAND=curl\nCAMERA_COMMAND_EXTRA_PARAMS=http://esp32-wrover-0461c8.local:8081/ -o\n
"},{"location":"config.for.camera.snapshot/#beagle-camera","title":"Beagle Camera","text":"

This is not tested, I do not own such camera so hard to tell if this is right.

Camera URL for snapshot http://192.168.2.92/images/snapshot0.jpg so the config should be like below:

PRINTER_ADDRESS=127.0.0.1\nPRUSA_CONNECT_CAMERA_TOKEN=redacted\nPRUSA_CONNECT_CAMERA_FINGERPRINT=06f47777-f179-4025-bd80-9e4cb8db2aed\nCAMERA_DEVICE=/dev/null\nCAMERA_COMMAND=curl\nCAMERA_COMMAND_EXTRA_PARAMS=http://192.168.2.92/images/snapshot0.jpg -o\n
"},{"location":"config.for.camera.usb/","title":"USB camera","text":"

This should work on any linux distro with any sane camera that you have.

"},{"location":"config.for.camera.usb/#how-to-get-info-which-cameras-are-available","title":"How to get info which cameras are available?","text":"

Run v4l2-ctl --list-devices.

This should show list of devices to use, where /dev/video0 is a device name.

Notice that not every device is an actual camera.

"},{"location":"config.for.camera.usb/#how-to-get-what-modes-are-available-for-the-camera","title":"How to get what modes are available for the camera?","text":"

The quick all-in one output for camera /dev/video0 is

v4l2-ctl -d /dev/video0 --all\n

For more details about formats it is better to use v4l2-ctl --list-formats-ext -d /dev/video0

"},{"location":"config.for.camera.usb/#prepare-config","title":"Prepare config","text":"
  • copy usb.dist as .env
  • in copied file .env replace token-change-me with the value of the token you copied
  • in copied file .env replace fingerprint-change-me with some random value, which is alphanumeric and has at least 16 chars (and max of 40 chars), for example set it to fingerprint-myprinter2-camera-2
  • in copied file .env replace /dev/video0 with desired device in CAMERA_DEVICE
  • save edited file .env

Next, test config.

"},{"location":"config.for.camera.usb/#real-world-example","title":"Real world example","text":"

Raspberry Pi Zero W with endoscope camera over USB, registered as /dev/video1:

PRINTER_ADDRESS=192.168.1.25\nPRUSA_CONNECT_CAMERA_TOKEN=redacted\nPRUSA_CONNECT_CAMERA_FINGERPRINT=7054ba85-bc19-4eb9-badc-6129575d9651\nCAMERA_DEVICE=/dev/video1\nCAMERA_COMMAND=fswebcam\nCAMERA_COMMAND_EXTRA_PARAMS=\"--resolution 1280x960 --no-banner\"\n
"},{"location":"configuration.env.full/","title":"Configuration Env Vars","text":"

Config for the camera is passed to the script as environment variables (env vars).

  • SLEEP - sleep time in seconds between image captures, notice that PrusaConnect accepts images at most every 10s or slower. Default value 10.

  • PRINTER_ADDRESS - Printer address to ping, if address is unreachable there is no point in sending an image. Set to 127.0.0.1 to always send images. Set to empty value to disable ping check and always send images. Default value 127.0.0.1

  • PRUSA_CONNECT_CAMERA_TOKEN - required, PrusaConnect API key

  • PRUSA_CONNECT_CAMERA_FINGERPRINT - required, PrusaConnect camera fingerprint, use for example cli uuidgen or web to generate it, it must be at least 16 alphanumeric chars, 40 max. Remember not to change this if it was already set, otherwise you need to remove and add the camera again.

  • CAMERA_DEVICE - camera device to use, if you use Raspberry Pi camera attached to the CSI via camera ribbon then leave as is Default /dev/video0 which points to first detected camera.

  • CAMERA_SETUP_COMMAND - camera setup command and params executed before taking an image, default value is empty, because some cameras do not support it; in general you want to use something like v4l2-ctl parameters, so for example setup_command=v4l2-ctl --set-ctrl brightness=10,gamma=120 -d $CAMERA_DEVICE will translate to: v4l2-ctl --set-ctrl brightness=10,gamma=120 -d /dev/video0

  • CAMERA_COMMAND - command used to invoke image capture, default is rpicam-still available options:

  • rpicam-still - using CSI camera + modern Raspberry Pi operating systems since Debian 11 Bullseye
  • raspistill - using CSI camera + older Raspberry Pi operating systems
  • fswebcam - using USB camera + custom package 'fswebcam'
  • anything else will be processed directly, so for example you could use 'ffmpeg' in here

  • CAMERA_COMMAND_EXTRA_PARAMS - extra params passed to the camera program, passed directly as <command> <extra-params> <output_file>; example values per specific camera:

  • libcamera (rpicam-still) --immediate --nopreview --mode 2592:1944:12:P --lores-width 0 --lores-height 0 --thumb none -o

  • raspistill --nopreview --mode 2592:1944:12:P -o
  • fswebcam --resolution 1280x960 --no-banner
  • ffmpeg, in this case CAMERA_DEVICE is ignored, use it directly in the extra params -f v4l2 -y -i /dev/video0 -f image2 -vframes 1 -pix_fmt yuvj420p
  • TARGET_DIR - directory where to save camera images, image per camera will be overwritten per image capture, default value /dev/shm so that we do not write to microSD cards or read only filesystems/containers. /dev/shm is a shared memory space. if you have more printers you may need to increase this value on system level.

  • CURL_EXTRA_PARAMS - extra params to curl when pushing an image, default empty value, but you could for example add additional params if needed such as -k if using tls proxy with self-signed certificate

  • PRUSA_CONNECT_URL - Prusa Connect endpoint where to post images, default value https://webcam.connect.prusa3d.com/c/snapshot You could put here Prusa Connect Proxy if you use one.

For more in-depth details (no need to repeat them here) please see the top of the prusa-connect-camera.sh.

"},{"location":"configuration.env/","title":"Configuration Env Vars","text":""},{"location":"configuration.env/#minimum-required-env-vars","title":"Minimum required env vars","text":"

Config for the camera is passed to the script as environment variables (env vars).

The most important env vars are:

  • PRUSA_CONNECT_CAMERA_TOKEN
  • PRUSA_CONNECT_CAMERA_FINGERPRINT
  • CAMERA_COMMAND
  • CAMERA_COMMAND_EXTRA_PARAMS

Those env vars will be filled in in the next steps.

Full list of env vars can be seen here

"},{"location":"configuration.overview/","title":"Configuration Overview","text":"

Short overview of actions:

  • ensure printer is up and running and sending status to Prusa Connect (otherwise images will be discarded)
  • add new camera to the existing printer in Prusa Connect, obtain token and generate fingerprint
  • create config for prusa-connect-camera-script env vars
  • test the config
  • install script as systemd service
  • tuning config
"},{"location":"configuration.tuning/","title":"Configuration tuning","text":"

Assuming you already have a working camera with basic setup, we can tune it further.

Below steps depend on the camera capabilities, thus your mileage may vary.

Notice that Prusa Connect has an upload file size limit of roughly 8 MB per image, so there may be no point in capturing images at extremely high resolutions.

"},{"location":"configuration.tuning/#getting-higher-quality-camera-images","title":"Getting higher quality camera images","text":"

Use v4l2-ctl to get the list of available resolutions that camera provides and then update it in the env var configs.

Run v4l2-ctl --list-formats-ext -d /dev/video0 where /dev/video0 is a device listed from command above.

Example output:

v4l2-ctl --list-formats-ext -d /dev/video1\nioctl: VIDIOC_ENUM_FMT\n  Type: Video Capture\n\n  [0]: 'MJPG' (Motion-JPEG, compressed)\n    Size: Discrete 640x480\n      Interval: Discrete 0.033s (30.000 fps)\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 640x360\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 352x288\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 320x240\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 176x144\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 160x120\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 800x600\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 1280x720\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 1280x960\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 640x480\n      Interval: Discrete 0.033s (30.000 fps)\n      Interval: Discrete 0.033s (30.000 fps)\n  [1]: 'YUYV' (YUYV 4:2:2)\n    Size: Discrete 640x480\n      Interval: Discrete 0.033s (30.000 fps)\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 640x360\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 352x288\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 320x240\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 176x144\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 160x120\n      Interval: Discrete 0.033s (30.000 fps)\n    Size: Discrete 800x600\n      Interval: Discrete 0.200s (5.000 fps)\n    Size: Discrete 1280x720\n      Interval: Discrete 0.200s (5.000 fps)\n    Size: Discrete 1280x960\n      Interval: Discrete 0.200s (5.000 fps)\n    Size: Discrete 640x480\n      Interval: Discrete 0.033s (30.000 fps)\n      Interval: Discrete 0.033s (30.000 fps)\n

As you can see if I set video to YUYV and with resolution higher than 800x600 I would get only 5 frames per second. For still images this is not a problem, but for video streaming that could be too low and I would have to switch to MJPG (or actually mjpeg in ffmpeg)

For Raspberry Cam v2 you could use csi.dist as source and add --mode 2592:1944:12:P to the CAMERA_COMMAND_EXTRA_PARAMS.

For certain USB cameras (such as Tracer Endoscope) you should use usb.dist and you should be able to add --resolution 1280x960 to the CAMERA_COMMAND_EXTRA_PARAMS.

"},{"location":"configuration.tuning/#setting-up-video-camera-controls","title":"Setting up video camera controls","text":"

Video controls are things like brightness, auto white balance (awb), exposure and so on.

Get device capabilities, especially User controls:

v4l2-ctl -d /dev/video0 -l\n

and set accordingly parameters you want in CAMERA_SETUP_COMMAND env var, for example:

CAMERA_SETUP_COMMAND=\"v4l2-ctl --set-ctrl brightness=64,gamma=300 -d $CAMERA_DEVICE\"\n

remember to restart given camera service.

You can try to use the guvcview desktop application to check params in realtime.

"},{"location":"configuration.tuning/#image-flip-and-rotation","title":"Image flip and rotation","text":"

You can pass on params to rpicam-still or fswebcam as you want.

"},{"location":"configuration.tuning/#rpicam-still","title":"rpicam-still","text":"

See rpicam-still --help

  --hflip      Read out with horizontal mirror\n  --vflip      Read out with vertical flip\n  --rotation   Use hflip and vflip to create the given rotation <angle>\n

so for example:

CAMERA_COMMAND=rpicam-still\nCAMERA_COMMAND_EXTRA_PARAMS=\"--rotation 90 --immediate --nopreview --thumb none -o\"\n
"},{"location":"configuration.tuning/#fswebcam","title":"fswebcam","text":"

See fswebcam --help

  --flip <direction>       Flips the image. (h, v)\n  --crop <size>[,<offset>] Crop a part of the image.\n  --scale <size>           Scales the image.\n  --rotate <angle>         Rotates the image in right angles.\n

so for example:

CAMERA_COMMAND=fswebcam\nCAMERA_COMMAND_EXTRA_PARAMS=\"--flip v --resolution 640x480 --no-banner\"\n
"},{"location":"configuration.tuning/#ffmpeg","title":"ffmpeg","text":"

When curl is not enough and you don't really want to physically rotate your camera, then use ffmpeg for post processing. You can process static images with it, load v4l2 devices... whatever.

With ffmpeg you can do interesting things with filters, it will just require more computing power.

"},{"location":"configuration.tuning/#adding-v4l2-options","title":"Adding v4l2 options","text":"

v4l2 can be used as alias for video4linux2.

You can pass video4linux options to ffmpeg on device initialization, for example:

ffmpeg -f v4l2 -pix_fmt mjpeg -video_size 1280x960 -framerate 30 -i /dev/video1 \\\n  -c:v libx264 -preset ultrafast -b:v 6000k -f rtsp rtsp://localhost:$RTSP_PORT/$MTX_PATH\n

would instruct ffmpeg to use video4linux and force it to talk to the camera under /dev/video1 and forcing mjpeg encoder, resolution and framerate.

This command above is directly taken from mediamtx.

For more params, see official ffmpeg docs. Just remember to pass them before defining input (-i /dev/video1).

"},{"location":"configuration.tuning/#rotation","title":"Rotation","text":"

See here for basic ones.

You probably want to use -vf \"transpose=1\" to rotate image 90 degrees clockwise:

CAMERA_COMMAND=ffmpeg\nCAMERA_COMMAND_EXTRA_PARAMS=\"-y -i 'http://esp32-wrover-0461c8.local:8080/' -vf 'transpose=1' -vframes 1 -q:v 1 -f image2 -update 1 \"\n
"},{"location":"configuration.tuning/#other-processing","title":"Other processing","text":"

Frankly speaking you can do anything you want with ffmpeg, for example

-vf transpose=1,shufflepixels=m=block:height=16:width=16

Why? why not :D

"},{"location":"installation/","title":"Installation","text":"

Install system packages - assuming Debian-based distros such as Raspberry Pi OS, which also come with some pre-installed packages.

Below commands should be executed in shell/terminal (on the Raspberry Pi).

For most Raspberry Pi Cameras (CSI/USB):

sudo apt-get update\nsudo apt-get install -y curl libcamera0 fswebcam git iputils-ping v4l-utils uuid-runtime\n

Additional packages for remote cameras - especially the ones that are used for streaming:

sudo apt-get install -y ffmpeg\n

Download this script:

mkdir -p /home/pi/src\ncd /home/pi/src\ngit clone https://github.com/nvtkaszpir/prusa-connect-camera-script.git\ncd prusa-connect-camera-script\n
"},{"location":"performance/","title":"Performance","text":"
  • Raspberry Pi Zero W is able to process CSI camera (Rpi Cam v2) and USB 2k camera but it has load average about 1.4, and CPU is quite well utilized, so you may need to decrease resolution per camera to see how it goes.

  • for webcams it is always better to choose snapshot because it requires less computing both on camera and on the host, otherwise we need to use ffmpeg

  • ffmpeg is usually noticeably slow and cpu intensive, especially if you do more complex operations

"},{"location":"prusa.connect/","title":"Create new camera in the Prusa Connect","text":"
  • go to Prusa Connect and log in
  • select Printer
  • select Camera
  • on the bottom click Add new other camera
  • new camera is created, copy Token, this is needed later as PRUSA_CONNECT_CAMERA_TOKEN env var
"},{"location":"requirements/","title":"Requirements","text":""},{"location":"requirements/#hardware","title":"Hardware","text":"

Physical host or virtual machine or container:

  • probably something like Raspberry Pi Zero W at least, can be without camera
  • more cameras usually require more compute power

Camera such as:

  • Raspberry Pi CSI cameras such as Raspberry Pi Cam
  • most of USB cameras if they work under Linux
  • esphome cameras using esp32_camera_web_server with snapshot module
  • esphome cameras using esp32_camera_web_server with stream module using ffmpeg
  • probably any camera if using ffmpeg
"},{"location":"requirements/#software","title":"Software","text":"

Linux operating system. Debian based preferred, for example Raspberry Pi OS Lite if you run Raspberry Pi. I use also laptop with Ubuntu 22.04, but I believe with minor tweaks it should work on most distributions (mainly package names are different).

Below list uses Debian package names.

"},{"location":"requirements/#generic-system-packages","title":"Generic system packages","text":"
  • bash 5.x (what year is it?)
  • git (just to install scripts from this repo)
  • curl
  • iputils-ping
  • uuid-runtime to make generation of camera fingerprint easier
"},{"location":"requirements/#optional-packages","title":"Optional packages","text":"
  • v4l-utils - to detect camera capabilities
  • libcamera0 - for Rpi CSI cameras
  • libraspberrypi-bin or rpicam-apps-lite for Rpi CSI cameras (should be already installed on Rpi OS)
  • fswebcam - for generic USB cameras
  • ffmpeg - for custom commands for capturing remote streams
  • you-name-it - for custom commands beyond my imagination
"},{"location":"service.docker/","title":"Install script as docker container","text":"

You can run the app as container.

Multi-platform images are available at quay.io/kaszpir/prusa-connect-script.

Currently available platforms:

  • linux/amd64 (64bit)
  • linux/arm64 (64bit)
  • linux/arm/v7 (32bit)
"},{"location":"service.docker/#preparation-of-the-host","title":"Preparation of the host","text":"

Install docker on Debian.

Optional - you may want to make sure current user is in docker group so it is possible to run containers without using sudo:

sudo usermod -a -G docker $(whoami)\n

logout and login again, or reboot Raspberry Pi.

"},{"location":"service.docker/#preparation-of-env-files-for-docker-command","title":"Preparation of env files for docker command","text":"

Notice - you do not have to do it if you use docker-compose.

If you use docker command directly you need to edit env files and remove quotation marks from the files (this is a limitation of the Docker)

For example:

CAMERA_COMMAND_EXTRA_PARAMS=\"--immediate --nopreview --thumb none -o\"\n

becomes

CAMERA_COMMAND_EXTRA_PARAMS=--immediate --nopreview --thumb none -o\n
"},{"location":"service.docker/#raspberry-pi-csi-or-usb-camera","title":"Raspberry Pi CSI or USB camera","text":"

We assume that .csi is an env file with example variables after editing; it is then possible to run the command below and have screenshots sent to Prusa Connect.

docker run --env-file .csi -v /run/udev:/run/udev:ro -v /dev/:/dev/ --device /dev:/dev --read-only quay.io/kaszpir/prusa-connect-script:03c4886\n
"},{"location":"service.docker/#raspberry-pi-and-remote-cams","title":"Raspberry Pi and remote cams","text":"

If you use remote camera you can make command even shorter:

docker run --env-file .esp32 --read-only quay.io/kaszpir/prusa-connect-script:03c4886\n
"},{"location":"service.docker/#other-examples","title":"Other examples","text":"
docker run --env-file .docker-csi --device /dev:/dev -v /dev/:/dev/ -v /run/udev:/run/udev:ro -it quay.io/kaszpir/prusa-connect-script:03c4886-arm64\n\ndocker run --env-file .docker-esphome-snapshot --read-only quay.io/kaszpir/prusa-connect-script:03c4886-amd64\ndocker run --env-file .docker-video0 --device /dev:/dev -v /dev/:/dev/ -v /run/udev:/run/udev:ro -it quay.io/kaszpir/prusa-connect-script:03c4886\n
"},{"location":"service.docker/#running-multiple-cameras-at-once","title":"Running multiple cameras at once","text":"

Create env file per camera and run each container separately.

"},{"location":"service.docker/#docker-compose","title":"docker-compose","text":"

Instead of running single command per container, you can manage them using docker-compose. Example docker-compose.yaml contains some examples. Some sections are commented out, though.

Notice they still require proper env files to work, for example copy usb.dist as .usb, edit its parameters and run docker-compose up

Notice that you may need to change remote cameras addresses from hostnames to IP addresses.

Another notice that sharing /dev/ or /dev/shm across different containers with different architectures may be problematic.

"},{"location":"service/","title":"Install script as service","text":"

Depending on the distro there are various options to configure scripts as service.

  • systemd - most common service on Linux systems
  • docker - run as Docker container

Other - not implemented, do it on your own.

"},{"location":"service.systemd/","title":"Install script as systemd service","text":"

Depending on the distro there are various options to configure scripts as service. On newer distros Raspberry Pi runs systemd, we will use that.

cd /home/pi/src/prusa-connect-camera-script\nsudo cp -f prusa-connect-camera@.service /etc/systemd/system/prusa-connect-camera@.service\nsudo systemctl daemon-reload\n
"},{"location":"service.systemd/#configuring-single-camera","title":"Configuring single camera","text":"

Assuming that /home/pi/src/prusa-connect-camera-script/.env file was created in previous steps, we use that .env file as example camera config.

Notice there is no dot before env in the commands below!

sudo systemctl enable prusa-connect-camera@env.service\nsudo systemctl start prusa-connect-camera@env.service\nsudo systemctl status prusa-connect-camera@env.service\n

Above commands will enable given service on device restart (reboot), start the service and show current status.

"},{"location":"service.systemd/#configure-multiple-cameras","title":"Configure multiple cameras","text":"

This project allows spawning multiple systemd units. The suffix after @ defines what env file to load from given path. For example if you set unit file name to prusa-connect-camera@csi.service then systemd will load env vars from the file under path /home/pi/src/prusa-connect-camera-script/.csi

So in short:

  • copy csi.dist as .csi and edit it
  • copy prusa-connect-camera@.service as prusa-connect-camera@csi.service
  • you may additionally edit unit file if you use different config paths
  • run systemctl daemon-reload
  • enable systemd service
  • start systemd service
cd /home/pi/src/prusa-connect-camera-script/\ncp csi.dist .csi\n# edit .csi and set custom command params, token and fingerprint etc...\nsudo systemctl enable prusa-connect-camera@csi.service\nsudo systemctl start prusa-connect-camera@csi.service\nsudo systemctl status prusa-connect-camera@csi.service\n

For another camera, let's say one attached over USB:

cd /home/pi/src/prusa-connect-camera-script/\ncp usb.dist .usb1\n# edit .usb1 and set device, token and fingerprint etc...\nsudo systemctl enable prusa-connect-camera@usb1.service\nsudo systemctl start prusa-connect-camera@usb1.service\nsudo systemctl status prusa-connect-camera@usb1.service\n

For an esphome camera, using static images:

cd /home/pi/src/prusa-connect-camera-script/\ncp esphome-snapshot.dist .esphome1\n# edit .esphome1 and set device, token and fingerprint etc...\nsudo systemctl enable prusa-connect-camera@esphome1.service\nsudo systemctl start prusa-connect-camera@esphome1.service\nsudo systemctl status prusa-connect-camera@esphome1.service\n

I hope you get the idea...

"},{"location":"service.systemd/#uninstall-systemd-service","title":"Uninstall systemd service","text":"

Just run two commands per camera (where csi is a camera config):

sudo systemctl stop prusa-connect-camera@csi.service\nsudo systemctl disable prusa-connect-camera@csi.service\n

After removing all cameras, remove the systemd service definition and reload the daemon:

sudo rm -f /etc/systemd/system/prusa-connect-camera@.service\nsudo systemctl daemon-reload\n
"},{"location":"stream.mediamtx/","title":"mediamtx","text":"

Use mediamtx on another Raspberry Pi to create an RTSP camera stream for testing.

Assuming you run mediamtx with a Raspberry Pi CSI camera, that raspberry-pi is the hostname of your device, and that you expose two cams:

  • CSI Raspberry Pi camera under /dev/video0
  • USB camera under /dev/video1

so your mediamtx.yml has a config fragment such as:

paths:\n  cam:\n    source: rpiCamera\n\n  endoscope:\n    runOnInit: ffmpeg -f v4l2 -pix_fmt mjpeg -video_size 1280x960 -framerate 30 -i /dev/video1 -c:v libx264 -preset ultrafast -b:v 6000k -f rtsp rtsp://localhost:$RTSP_PORT/$MTX_PATH\n    runOnInitRestart: yes\n

Start mediamtx server:

./mediamtx\n

This should allow us to reach two streams; replace rpi-address with your Raspberry Pi hostname or IP address. The ports are the mediamtx defaults.

ffplay rtsp://rpi-address:8554/cam\nffplay rtsp://rpi-address:8554/endoscope\n

Or you can watch them in a web browser under endpoints such as:

http://rpi-address:8889/cam\nhttp://rpi-address:8889/endoscope\n
"},{"location":"test.config/","title":"Test the config","text":"
  • ensure the 3D printer is turned on so that it sends telemetry; otherwise images will still be sent and the uploads will report success, but they will not be available on the Prusa Connect page
  • run the commands below; we assume .env is the camera config we defined earlier
set -o allexport; source .env; set +o allexport\n./prusa-connect-camera.sh\n

The commands above load the env vars and start the script. At startup the script prints some of the commands that will be executed, for example the command used to fetch an image from the camera; an example log line:

Camera capture command: fswebcam -d /dev/video0 --resolution 640x480 --no-banner /dev/shm/camera_87299de9-ea57-45be-b6ea-4d388a52c954.jpg\n

so you should run:

fswebcam -d /dev/video0 --resolution 640x480 --no-banner /dev/shm/camera_87299de9-ea57-45be-b6ea-4d388a52c954.jpg\n

to check the output from the command; it should also write an image.

Check for errors, if any; if everything is OK you should see a 204 response roughly every 10s.

If not, well, raise an issue on GitHub.

"}]} \ No newline at end of file diff --git a/service.docker/index.html b/service.docker/index.html new file mode 100644 index 0000000..5a37dc7 --- /dev/null +++ b/service.docker/index.html @@ -0,0 +1,978 @@ + + + + + + + + + + + + + + + + + + + + Install script as docker container - Prusa Connect Camera Script + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Install script as docker container

+

You can run the app as a container.

+

Multi-platform images are available at quay.io/kaszpir/prusa-connect-script.

+

Currently available platforms:

+
    +
  • linux/amd64 (64bit)
  • +
  • linux/arm64 (64bit)
  • +
  • linux/arm/v7 (32bit)
  • +
+
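Docker normally selects the matching image from the multi-arch tag automatically. If you want to be explicit, for example when pre-pulling for a specific board, you can use the standard --platform flag; a minimal sketch:

docker pull --platform linux/arm64 quay.io/kaszpir/prusa-connect-script:03c4886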

Preparation of the host

+

Install docker on Debian.

+
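If you just want a quick setup on Raspberry Pi OS / Debian, one common route is Docker's upstream convenience script; this is only a sketch, and the linked Debian instructions describe the repository-based install in detail:

curl -fsSL https://get.docker.com -o get-docker.sh
sudo sh get-docker.sh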

Optional - you may want to make sure the current user is in the docker group so it is possible to run containers without using sudo:

+
sudo usermod -a -G docker $(whoami)
+
+

Log out and log in again, or reboot the Raspberry Pi.

+
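To confirm the group change took effect after logging back in, you can list your groups (standard Linux tooling, nothing specific to this project):

groups
# "docker" should appear in the output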

Preparation of env files for docker command

+

Notice - you do not have to do this if you use docker-compose.

+

If you use the docker command directly you need to edit the env files and remove quotation marks from the values (this is a limitation of how Docker parses env files).

+

For example:

+
CAMERA_COMMAND_EXTRA_PARAMS="--immediate --nopreview --thumb none -o"
+
+

becomes

+
CAMERA_COMMAND_EXTRA_PARAMS=--immediate --nopreview --thumb none -o
+
+
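If you have many values to adjust, you can strip the double quotes in one go with sed. This is only a convenience sketch and assumes none of your values are meant to keep literal quote characters:

cp csi.dist .docker-csi
# edit .docker-csi first (token, fingerprint, camera params), then drop the quotes
sed -i 's/"//g' .docker-csi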

Raspberry Pi CSI or USB camera

+

We assume that .csi is an env file with the example variables already edited; then it is possible to run the command below and have snapshots sent to Prusa Connect.

+ +
docker run --env-file .csi -v /run/udev:/run/udev:ro -v /dev/:/dev/ --device /dev:/dev --read-only quay.io/kaszpir/prusa-connect-script:03c4886
+
+ + +
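For long-running use you will probably want the container detached and named so you can follow its logs later; a sketch using standard docker flags (the container name is arbitrary):

docker run -d --name prusa-cam-csi --env-file .csi -v /run/udev:/run/udev:ro -v /dev/:/dev/ --device /dev:/dev --read-only quay.io/kaszpir/prusa-connect-script:03c4886
docker logs -f prusa-cam-csi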

Raspberry Pi and remote cams

+

If you use a remote camera you can make the command even shorter:

+
docker run --env-file .esp32 --read-only quay.io/kaszpir/prusa-connect-script:03c4886
+
+

Other examples

+ +
docker run --env-file .docker-csi --device /dev:/dev -v /dev/:/dev/ -v /run/udev:/run/udev:ro -it quay.io/kaszpir/prusa-connect-script:03c4886-arm64
+
+docker run --env-file .docker-esphome-snapshot --read-only quay.io/kaszpir/prusa-connect-script:03c4886-amd64
+docker run --env-file .docker-video0 --device /dev:/dev -v /dev/:/dev/ -v /run/udev:/run/udev:ro -it quay.io/kaszpir/prusa-connect-script:03c4886
+
+ + +

Running multiple cameras at once

+

Create an env file per camera and run each container separately, as shown below.

+
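For example, with two env files prepared as described above, running two cameras side by side could look roughly like this (container names are arbitrary):

docker run -d --name cam-csi --env-file .docker-csi --device /dev:/dev -v /dev/:/dev/ -v /run/udev:/run/udev:ro quay.io/kaszpir/prusa-connect-script:03c4886
docker run -d --name cam-esphome --env-file .docker-esphome-snapshot --read-only quay.io/kaszpir/prusa-connect-script:03c4886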

docker-compose

+

Instead of running a single command per container, you can manage them using docker-compose. The example docker-compose.yaml contains several sample service definitions; some sections are commented out.

+

Notice they still require proper env files to work: for example, copy usb.dist as .usb, edit its parameters and run docker-compose up.

+
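A minimal sketch of that workflow, assuming you run it from the repository checkout that contains docker-compose.yaml and the *.dist templates:

cp usb.dist .usb
# edit .usb and set device, token, fingerprint etc.
docker-compose up -d
docker-compose logs -f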

Notice that you may need to change remote camera addresses from hostnames to IP addresses.

+

Also note that sharing /dev/ or /dev/shm across containers built for different architectures may be problematic.

\ No newline at end of file diff --git a/service.systemd/index.html new file mode 100644 index 0000000..266b7e1 --- /dev/null +++ b/service.systemd/index.html @@ -0,0 +1,1047 @@ Systemd - Prusa Connect Camera Script

Install script as systemd service

+

Depending on the distro there are various options to configure the script as a service. Newer Raspberry Pi OS releases run systemd, so we will use that.

+
cd /home/pi/src/prusa-connect-camera-script
+sudo cp -f prusa-connect-camera@.service /etc/systemd/system/prusa-connect-camera@.service
+sudo systemctl daemon-reload
+
+

Configuring single camera

+

Assuming that the /home/pi/src/prusa-connect-camera-script/.env file was created in the previous steps, we use that .env file as the example camera config.

+

Notice there is no dot before env in the commands below!

+
sudo systemctl enable prusa-connect-camera@env.service
+sudo systemctl start prusa-connect-camera@env.service
+sudo systemctl status prusa-connect-camera@env.service
+
+

The commands above enable the service on device restart (reboot), start the service and show its current status.

+
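If the status output is not detailed enough, you can follow the service logs with journalctl (standard systemd tooling):

journalctl -u prusa-connect-camera@env.service -f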

Configure multiple cameras

+

This project allows spawning multiple systemd units. The suffix after @ defines which env file to load from the given path. For example, if you name the unit file prusa-connect-camera@csi.service, then systemd will load env vars from the file under the path /home/pi/src/prusa-connect-camera-script/.csi

+
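The mechanism behind this is a systemd template unit: the part after @ becomes the instance name that the unit file expands. To see exactly how a given instance resolves, including how the env file path is filled in, you can render it with systemctl (standard tooling):

systemctl cat prusa-connect-camera@csi.service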

So in short:

+
    +
  • copy csi.dist as .csi and edit it
  • +
  • copy prusa-connect-camera@.service as prusa-connect-camera@csi.service
  • +
  • you may additionally edit the unit file if you use different config paths
  • +
  • run systemctl daemon-reload
  • +
  • enable systemd service
  • +
  • start systemd service
  • +
+
cd /home/pi/src/prusa-connect-camera-script/
+cp csi.dist .csi
+# edit .csi and set custom command params, token and fingerprint etc...
+sudo systemctl enable prusa-connect-camera@csi.service
+sudo systemctl start prusa-connect-camera@csi.service
+sudo systemctl status prusa-connect-camera@csi.service
+
+

For another camera, say one attached over USB:

+
cd /home/pi/src/prusa-connect-camera-script/
+cp usb.dist .usb1
+# edit .usb1 and set device, token and fingerprint etc...
+sudo systemctl enable prusa-connect-camera@usb1.service
+sudo systemctl start prusa-connect-camera@usb1.service
+sudo systemctl status prusa-connect-camera@usb1.service
+
+

For an esphome camera, using static images:

+
cd /home/pi/src/prusa-connect-camera-script/
+cp esphome-snapshot.dist .esphome1
+# edit .esphome1 and set device, token and fingerprint etc...
+sudo systemctl enable prusa-connect-camera@esphome1.service
+sudo systemctl start prusa-connect-camera@esphome1.service
+sudo systemctl status prusa-connect-camera@esphome1.service
+
+

I hope you get the idea...

+

Uninstall systemd service

+

Just run two commands per camera (where csi is a camera config):

+
sudo systemctl stop prusa-connect-camera@csi.service
+sudo systemctl disable prusa-connect-camera@csi.service
+
+
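To double-check that no instances are left after stopping and disabling them, you can list the matching units (standard systemctl usage):

systemctl list-units --all 'prusa-connect-camera@*.service'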

After removing all cameras, remove the systemd service definition and reload the daemon:

+
sudo rm -f /etc/systemd/system/prusa-connect-camera@.service
+sudo systemctl daemon-reload
+
\ No newline at end of file diff --git a/service/index.html new file mode 100644 index 0000000..f88c5fc --- /dev/null +++ b/service/index.html @@ -0,0 +1,915 @@ Overview - Prusa Connect Camera Script

Install script as service

+

Depending on the distro there are various options to configure the script as a service.

+
    +
  • systemd - most common service on Linux systems
  • +
  • docker - run as Docker container
  • +
+

Other init systems - not implemented, set them up on your own.

+ + + + + + + + + \ No newline at end of file diff --git a/sitemap.xml b/sitemap.xml new file mode 100644 index 0000000..1fc7158 --- /dev/null +++ b/sitemap.xml @@ -0,0 +1,118 @@ + + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/config.for.camera.csi.legacy/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/config.for.camera.csi.libcamera/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/config.for.camera.esphome.snapshot/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/config.for.camera.esphome.stream/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/config.for.camera/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/config.for.camera.mjpg/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/config.for.camera.rtsp/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/config.for.camera.snapshot/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/config.for.camera.usb/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/configuration.env.full/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/configuration.env/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/configuration.overview/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/configuration.tuning/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/installation/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/performance/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/prusa.connect/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/requirements/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/service.docker/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/service/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/service.systemd/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/stream.mediamtx/ + 2024-04-30 + daily + + + https://nvtkaszpir.github.io/prusa-connect-camera-script/test.config/ + 2024-04-30 + daily + + \ No newline at end of file diff --git a/sitemap.xml.gz b/sitemap.xml.gz new file mode 100644 index 0000000..851b71b Binary files /dev/null and b/sitemap.xml.gz differ diff --git a/static/esp32-camera.jpg b/static/esp32-camera.jpg new file mode 100644 index 0000000..0f7e3d0 Binary files /dev/null and b/static/esp32-camera.jpg differ diff --git a/static/pi-camera.jpg b/static/pi-camera.jpg new file mode 100644 index 0000000..f908ed4 Binary files /dev/null and b/static/pi-camera.jpg differ diff --git a/static/prusa-connect-cam-small.png b/static/prusa-connect-cam-small.png new file mode 100644 index 0000000..9098616 Binary files /dev/null and b/static/prusa-connect-cam-small.png differ diff --git a/static/prusa-connect-cam.png b/static/prusa-connect-cam.png new file mode 100644 index 0000000..e789c0b Binary files /dev/null and b/static/prusa-connect-cam.png differ diff --git a/static/usb_cam.png b/static/usb_cam.png new file mode 100644 index 
0000000..daca6ba Binary files /dev/null and b/static/usb_cam.png differ diff --git a/stream.mediamtx/index.html new file mode 100644 index 0000000..122aa9a --- /dev/null +++ b/stream.mediamtx/index.html @@ -0,0 +1,925 @@ Streaming cameras with mediamtx - Prusa Connect Camera Script

mediamtx

+

Use mediamtx on another Raspberry Pi to create an RTSP camera stream for testing.

+

Assuming you run mediamtx with a Raspberry Pi CSI camera, that raspberry-pi is the hostname of your device, and that you expose two cams:

+
    +
  • CSI Raspberry Pi camera under /dev/video0
  • +
  • USB camera under /dev/video1
  • +
+

so your mediamtx.yml has a config fragment such as:

+ + +
paths:
+  cam:
+    source: rpiCamera
+
+  endoscope:
+    runOnInit: ffmpeg -f v4l2 -pix_fmt mjpeg -video_size 1280x960 -framerate 30 -i /dev/video1 -c:v libx264 -preset ultrafast -b:v 6000k -f rtsp rtsp://localhost:$RTSP_PORT/$MTX_PATH
+    runOnInitRestart: yes
+
+ + +
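Before relying on the ffmpeg line above, it can help to confirm that the USB camera really offers MJPEG at 1280x960. Assuming the v4l-utils package is installed, you can list the supported formats:

v4l2-ctl --list-formats-ext -d /dev/video1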

Start mediamtx server:

+
./mediamtx
+
+

This should allow us to reach two streams; replace rpi-address with your Raspberry Pi hostname or IP address. The ports are the mediamtx defaults.

+
ffplay rtsp://rpi-address:8554/cam
+ffplay rtsp://rpi-address:8554/endoscope
+
+

Or you can watch them in a web browser under endpoints such as:

+
http://rpi-address:8889/cam
+http://rpi-address:8889/endoscope
+
\ No newline at end of file diff --git a/test.config/index.html new file mode 100644 index 0000000..4e4c807 --- /dev/null +++ b/test.config/index.html @@ -0,0 +1,933 @@ Test Config - Prusa Connect Camera Script

Test the config

+
    +
  • ensure the 3D printer is turned on so that it sends telemetry; otherwise images will still be sent and the uploads will report success, but they will not be available on the Prusa Connect page
  • +
  • run the commands below; we assume .env is the camera config we defined earlier
  • +
+
set -o allexport; source .env; set +o allexport
+./prusa-connect-camera.sh
+
+

The commands above load the env vars and start the script. At startup the script prints some of the commands that will be executed, for example the command used to fetch an image from the camera; an example log line:

+ +
Camera capture command: fswebcam -d /dev/video0 --resolution 640x480 --no-banner /dev/shm/camera_87299de9-ea57-45be-b6ea-4d388a52c954.jpg
+
+

so you should run:

+
fswebcam -d /dev/video0 --resolution 640x480 --no-banner /dev/shm/camera_87299de9-ea57-45be-b6ea-4d388a52c954.jpg
+
+ + +

to check the output from the command; it should also write an image.

+
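To confirm the capture actually produced a file, you can list it; the path comes from the log line above and the UUID in the filename will differ on your system:

ls -lh /dev/shm/camera_*.jpg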

Check for errors, if any; if everything is OK you should see a 204 response roughly every 10s.

+

If not, well, raise an issue on GitHub.

+ + + + + + + + + \ No newline at end of file