diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/404.html b/404.html new file mode 100644 index 0000000..0470b22 --- /dev/null +++ b/404.html @@ -0,0 +1,454 @@ + + + + + + + + + + + + + + + + + + + + + + + VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ +

404 - Not found

+ +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/assets/images/favicon.png b/assets/images/favicon.png new file mode 100644 index 0000000..1cf13b9 Binary files /dev/null and b/assets/images/favicon.png differ diff --git a/assets/javascripts/bundle.1e8ae164.min.js b/assets/javascripts/bundle.1e8ae164.min.js new file mode 100644 index 0000000..2129798 --- /dev/null +++ b/assets/javascripts/bundle.1e8ae164.min.js @@ -0,0 +1,29 @@ +"use strict";(()=>{var _i=Object.create;var br=Object.defineProperty;var Ai=Object.getOwnPropertyDescriptor;var Ci=Object.getOwnPropertyNames,Ft=Object.getOwnPropertySymbols,ki=Object.getPrototypeOf,vr=Object.prototype.hasOwnProperty,eo=Object.prototype.propertyIsEnumerable;var Zr=(e,t,r)=>t in e?br(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,F=(e,t)=>{for(var r in t||(t={}))vr.call(t,r)&&Zr(e,r,t[r]);if(Ft)for(var r of Ft(t))eo.call(t,r)&&Zr(e,r,t[r]);return e};var to=(e,t)=>{var r={};for(var o in e)vr.call(e,o)&&t.indexOf(o)<0&&(r[o]=e[o]);if(e!=null&&Ft)for(var o of Ft(e))t.indexOf(o)<0&&eo.call(e,o)&&(r[o]=e[o]);return r};var gr=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports);var Hi=(e,t,r,o)=>{if(t&&typeof t=="object"||typeof t=="function")for(let n of Ci(t))!vr.call(e,n)&&n!==r&&br(e,n,{get:()=>t[n],enumerable:!(o=Ai(t,n))||o.enumerable});return e};var jt=(e,t,r)=>(r=e!=null?_i(ki(e)):{},Hi(t||!e||!e.__esModule?br(r,"default",{value:e,enumerable:!0}):r,e));var ro=(e,t,r)=>new Promise((o,n)=>{var i=c=>{try{s(r.next(c))}catch(p){n(p)}},a=c=>{try{s(r.throw(c))}catch(p){n(p)}},s=c=>c.done?o(c.value):Promise.resolve(c.value).then(i,a);s((r=r.apply(e,t)).next())});var no=gr((xr,oo)=>{(function(e,t){typeof xr=="object"&&typeof oo!="undefined"?t():typeof define=="function"&&define.amd?define(t):t()})(xr,function(){"use strict";function e(r){var o=!0,n=!1,i=null,a={text:!0,search:!0,url:!0,tel:!0,email:!0,password:!0,number:!0,date:!0,month:!0,week:!0,time:!0,datetime:!0,"datetime-local":!0};function 
s(C){return!!(C&&C!==document&&C.nodeName!=="HTML"&&C.nodeName!=="BODY"&&"classList"in C&&"contains"in C.classList)}function c(C){var ct=C.type,Ne=C.tagName;return!!(Ne==="INPUT"&&a[ct]&&!C.readOnly||Ne==="TEXTAREA"&&!C.readOnly||C.isContentEditable)}function p(C){C.classList.contains("focus-visible")||(C.classList.add("focus-visible"),C.setAttribute("data-focus-visible-added",""))}function l(C){C.hasAttribute("data-focus-visible-added")&&(C.classList.remove("focus-visible"),C.removeAttribute("data-focus-visible-added"))}function f(C){C.metaKey||C.altKey||C.ctrlKey||(s(r.activeElement)&&p(r.activeElement),o=!0)}function u(C){o=!1}function h(C){s(C.target)&&(o||c(C.target))&&p(C.target)}function w(C){s(C.target)&&(C.target.classList.contains("focus-visible")||C.target.hasAttribute("data-focus-visible-added"))&&(n=!0,window.clearTimeout(i),i=window.setTimeout(function(){n=!1},100),l(C.target))}function A(C){document.visibilityState==="hidden"&&(n&&(o=!0),Z())}function Z(){document.addEventListener("mousemove",J),document.addEventListener("mousedown",J),document.addEventListener("mouseup",J),document.addEventListener("pointermove",J),document.addEventListener("pointerdown",J),document.addEventListener("pointerup",J),document.addEventListener("touchmove",J),document.addEventListener("touchstart",J),document.addEventListener("touchend",J)}function te(){document.removeEventListener("mousemove",J),document.removeEventListener("mousedown",J),document.removeEventListener("mouseup",J),document.removeEventListener("pointermove",J),document.removeEventListener("pointerdown",J),document.removeEventListener("pointerup",J),document.removeEventListener("touchmove",J),document.removeEventListener("touchstart",J),document.removeEventListener("touchend",J)}function 
J(C){C.target.nodeName&&C.target.nodeName.toLowerCase()==="html"||(o=!1,te())}document.addEventListener("keydown",f,!0),document.addEventListener("mousedown",u,!0),document.addEventListener("pointerdown",u,!0),document.addEventListener("touchstart",u,!0),document.addEventListener("visibilitychange",A,!0),Z(),r.addEventListener("focus",h,!0),r.addEventListener("blur",w,!0),r.nodeType===Node.DOCUMENT_FRAGMENT_NODE&&r.host?r.host.setAttribute("data-js-focus-visible",""):r.nodeType===Node.DOCUMENT_NODE&&(document.documentElement.classList.add("js-focus-visible"),document.documentElement.setAttribute("data-js-focus-visible",""))}if(typeof window!="undefined"&&typeof document!="undefined"){window.applyFocusVisiblePolyfill=e;var t;try{t=new CustomEvent("focus-visible-polyfill-ready")}catch(r){t=document.createEvent("CustomEvent"),t.initCustomEvent("focus-visible-polyfill-ready",!1,!1,{})}window.dispatchEvent(t)}typeof document!="undefined"&&e(document)})});var zr=gr((kt,Vr)=>{/*! + * clipboard.js v2.0.11 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */(function(t,r){typeof kt=="object"&&typeof Vr=="object"?Vr.exports=r():typeof define=="function"&&define.amd?define([],r):typeof kt=="object"?kt.ClipboardJS=r():t.ClipboardJS=r()})(kt,function(){return function(){var e={686:function(o,n,i){"use strict";i.d(n,{default:function(){return Li}});var a=i(279),s=i.n(a),c=i(370),p=i.n(c),l=i(817),f=i.n(l);function u(D){try{return document.execCommand(D)}catch(M){return!1}}var h=function(M){var O=f()(M);return u("cut"),O},w=h;function A(D){var M=document.documentElement.getAttribute("dir")==="rtl",O=document.createElement("textarea");O.style.fontSize="12pt",O.style.border="0",O.style.padding="0",O.style.margin="0",O.style.position="absolute",O.style[M?"right":"left"]="-9999px";var I=window.pageYOffset||document.documentElement.scrollTop;return O.style.top="".concat(I,"px"),O.setAttribute("readonly",""),O.value=D,O}var Z=function(M,O){var 
I=A(M);O.container.appendChild(I);var W=f()(I);return u("copy"),I.remove(),W},te=function(M){var O=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body},I="";return typeof M=="string"?I=Z(M,O):M instanceof HTMLInputElement&&!["text","search","url","tel","password"].includes(M==null?void 0:M.type)?I=Z(M.value,O):(I=f()(M),u("copy")),I},J=te;function C(D){"@babel/helpers - typeof";return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?C=function(O){return typeof O}:C=function(O){return O&&typeof Symbol=="function"&&O.constructor===Symbol&&O!==Symbol.prototype?"symbol":typeof O},C(D)}var ct=function(){var M=arguments.length>0&&arguments[0]!==void 0?arguments[0]:{},O=M.action,I=O===void 0?"copy":O,W=M.container,K=M.target,Ce=M.text;if(I!=="copy"&&I!=="cut")throw new Error('Invalid "action" value, use either "copy" or "cut"');if(K!==void 0)if(K&&C(K)==="object"&&K.nodeType===1){if(I==="copy"&&K.hasAttribute("disabled"))throw new Error('Invalid "target" attribute. Please use "readonly" instead of "disabled" attribute');if(I==="cut"&&(K.hasAttribute("readonly")||K.hasAttribute("disabled")))throw new Error(`Invalid "target" attribute. 
You can't cut text from elements with "readonly" or "disabled" attributes`)}else throw new Error('Invalid "target" value, use a valid Element');if(Ce)return J(Ce,{container:W});if(K)return I==="cut"?w(K):J(K,{container:W})},Ne=ct;function Pe(D){"@babel/helpers - typeof";return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?Pe=function(O){return typeof O}:Pe=function(O){return O&&typeof Symbol=="function"&&O.constructor===Symbol&&O!==Symbol.prototype?"symbol":typeof O},Pe(D)}function xi(D,M){if(!(D instanceof M))throw new TypeError("Cannot call a class as a function")}function Xr(D,M){for(var O=0;O0&&arguments[0]!==void 0?arguments[0]:{};this.action=typeof W.action=="function"?W.action:this.defaultAction,this.target=typeof W.target=="function"?W.target:this.defaultTarget,this.text=typeof W.text=="function"?W.text:this.defaultText,this.container=Pe(W.container)==="object"?W.container:document.body}},{key:"listenClick",value:function(W){var K=this;this.listener=p()(W,"click",function(Ce){return K.onClick(Ce)})}},{key:"onClick",value:function(W){var K=W.delegateTarget||W.currentTarget,Ce=this.action(K)||"copy",It=Ne({action:Ce,container:this.container,target:this.target(K),text:this.text(K)});this.emit(It?"success":"error",{action:Ce,text:It,trigger:K,clearSelection:function(){K&&K.focus(),window.getSelection().removeAllRanges()}})}},{key:"defaultAction",value:function(W){return hr("action",W)}},{key:"defaultTarget",value:function(W){var K=hr("target",W);if(K)return document.querySelector(K)}},{key:"defaultText",value:function(W){return hr("text",W)}},{key:"destroy",value:function(){this.listener.destroy()}}],[{key:"copy",value:function(W){var K=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body};return J(W,K)}},{key:"cut",value:function(W){return w(W)}},{key:"isSupported",value:function(){var W=arguments.length>0&&arguments[0]!==void 0?arguments[0]:["copy","cut"],K=typeof 
W=="string"?[W]:W,Ce=!!document.queryCommandSupported;return K.forEach(function(It){Ce=Ce&&!!document.queryCommandSupported(It)}),Ce}}]),O}(s()),Li=Mi},828:function(o){var n=9;if(typeof Element!="undefined"&&!Element.prototype.matches){var i=Element.prototype;i.matches=i.matchesSelector||i.mozMatchesSelector||i.msMatchesSelector||i.oMatchesSelector||i.webkitMatchesSelector}function a(s,c){for(;s&&s.nodeType!==n;){if(typeof s.matches=="function"&&s.matches(c))return s;s=s.parentNode}}o.exports=a},438:function(o,n,i){var a=i(828);function s(l,f,u,h,w){var A=p.apply(this,arguments);return l.addEventListener(u,A,w),{destroy:function(){l.removeEventListener(u,A,w)}}}function c(l,f,u,h,w){return typeof l.addEventListener=="function"?s.apply(null,arguments):typeof u=="function"?s.bind(null,document).apply(null,arguments):(typeof l=="string"&&(l=document.querySelectorAll(l)),Array.prototype.map.call(l,function(A){return s(A,f,u,h,w)}))}function p(l,f,u,h){return function(w){w.delegateTarget=a(w.target,f),w.delegateTarget&&h.call(l,w)}}o.exports=c},879:function(o,n){n.node=function(i){return i!==void 0&&i instanceof HTMLElement&&i.nodeType===1},n.nodeList=function(i){var a=Object.prototype.toString.call(i);return i!==void 0&&(a==="[object NodeList]"||a==="[object HTMLCollection]")&&"length"in i&&(i.length===0||n.node(i[0]))},n.string=function(i){return typeof i=="string"||i instanceof String},n.fn=function(i){var a=Object.prototype.toString.call(i);return a==="[object Function]"}},370:function(o,n,i){var a=i(879),s=i(438);function c(u,h,w){if(!u&&!h&&!w)throw new Error("Missing required arguments");if(!a.string(h))throw new TypeError("Second argument must be a String");if(!a.fn(w))throw new TypeError("Third argument must be a Function");if(a.node(u))return p(u,h,w);if(a.nodeList(u))return l(u,h,w);if(a.string(u))return f(u,h,w);throw new TypeError("First argument must be a String, HTMLElement, HTMLCollection, or NodeList")}function p(u,h,w){return 
u.addEventListener(h,w),{destroy:function(){u.removeEventListener(h,w)}}}function l(u,h,w){return Array.prototype.forEach.call(u,function(A){A.addEventListener(h,w)}),{destroy:function(){Array.prototype.forEach.call(u,function(A){A.removeEventListener(h,w)})}}}function f(u,h,w){return s(document.body,u,h,w)}o.exports=c},817:function(o){function n(i){var a;if(i.nodeName==="SELECT")i.focus(),a=i.value;else if(i.nodeName==="INPUT"||i.nodeName==="TEXTAREA"){var s=i.hasAttribute("readonly");s||i.setAttribute("readonly",""),i.select(),i.setSelectionRange(0,i.value.length),s||i.removeAttribute("readonly"),a=i.value}else{i.hasAttribute("contenteditable")&&i.focus();var c=window.getSelection(),p=document.createRange();p.selectNodeContents(i),c.removeAllRanges(),c.addRange(p),a=c.toString()}return a}o.exports=n},279:function(o){function n(){}n.prototype={on:function(i,a,s){var c=this.e||(this.e={});return(c[i]||(c[i]=[])).push({fn:a,ctx:s}),this},once:function(i,a,s){var c=this;function p(){c.off(i,p),a.apply(s,arguments)}return p._=a,this.on(i,p,s)},emit:function(i){var a=[].slice.call(arguments,1),s=((this.e||(this.e={}))[i]||[]).slice(),c=0,p=s.length;for(c;c{"use strict";/*! 
+ * escape-html + * Copyright(c) 2012-2013 TJ Holowaychuk + * Copyright(c) 2015 Andreas Lubbe + * Copyright(c) 2015 Tiancheng "Timothy" Gu + * MIT Licensed + */var Va=/["'&<>]/;qn.exports=za;function za(e){var t=""+e,r=Va.exec(t);if(!r)return t;var o,n="",i=0,a=0;for(i=r.index;i0&&i[i.length-1])&&(p[0]===6||p[0]===2)){r=0;continue}if(p[0]===3&&(!i||p[1]>i[0]&&p[1]=e.length&&(e=void 0),{value:e&&e[o++],done:!e}}};throw new TypeError(t?"Object is not iterable.":"Symbol.iterator is not defined.")}function V(e,t){var r=typeof Symbol=="function"&&e[Symbol.iterator];if(!r)return e;var o=r.call(e),n,i=[],a;try{for(;(t===void 0||t-- >0)&&!(n=o.next()).done;)i.push(n.value)}catch(s){a={error:s}}finally{try{n&&!n.done&&(r=o.return)&&r.call(o)}finally{if(a)throw a.error}}return i}function z(e,t,r){if(r||arguments.length===2)for(var o=0,n=t.length,i;o1||s(u,h)})})}function s(u,h){try{c(o[u](h))}catch(w){f(i[0][3],w)}}function c(u){u.value instanceof ot?Promise.resolve(u.value.v).then(p,l):f(i[0][2],u)}function p(u){s("next",u)}function l(u){s("throw",u)}function f(u,h){u(h),i.shift(),i.length&&s(i[0][0],i[0][1])}}function so(e){if(!Symbol.asyncIterator)throw new TypeError("Symbol.asyncIterator is not defined.");var t=e[Symbol.asyncIterator],r;return t?t.call(e):(e=typeof ue=="function"?ue(e):e[Symbol.iterator](),r={},o("next"),o("throw"),o("return"),r[Symbol.asyncIterator]=function(){return this},r);function o(i){r[i]=e[i]&&function(a){return new Promise(function(s,c){a=e[i](a),n(s,c,a.done,a.value)})}}function n(i,a,s,c){Promise.resolve(c).then(function(p){i({value:p,done:s})},a)}}function k(e){return typeof e=="function"}function pt(e){var t=function(o){Error.call(o),o.stack=new Error().stack},r=e(t);return r.prototype=Object.create(Error.prototype),r.prototype.constructor=r,r}var Wt=pt(function(e){return function(r){e(this),this.message=r?r.length+` errors occurred during unsubscription: +`+r.map(function(o,n){return n+1+") "+o.toString()}).join(` + 
`):"",this.name="UnsubscriptionError",this.errors=r}});function Ve(e,t){if(e){var r=e.indexOf(t);0<=r&&e.splice(r,1)}}var Ie=function(){function e(t){this.initialTeardown=t,this.closed=!1,this._parentage=null,this._finalizers=null}return e.prototype.unsubscribe=function(){var t,r,o,n,i;if(!this.closed){this.closed=!0;var a=this._parentage;if(a)if(this._parentage=null,Array.isArray(a))try{for(var s=ue(a),c=s.next();!c.done;c=s.next()){var p=c.value;p.remove(this)}}catch(A){t={error:A}}finally{try{c&&!c.done&&(r=s.return)&&r.call(s)}finally{if(t)throw t.error}}else a.remove(this);var l=this.initialTeardown;if(k(l))try{l()}catch(A){i=A instanceof Wt?A.errors:[A]}var f=this._finalizers;if(f){this._finalizers=null;try{for(var u=ue(f),h=u.next();!h.done;h=u.next()){var w=h.value;try{co(w)}catch(A){i=i!=null?i:[],A instanceof Wt?i=z(z([],V(i)),V(A.errors)):i.push(A)}}}catch(A){o={error:A}}finally{try{h&&!h.done&&(n=u.return)&&n.call(u)}finally{if(o)throw o.error}}}if(i)throw new Wt(i)}},e.prototype.add=function(t){var r;if(t&&t!==this)if(this.closed)co(t);else{if(t instanceof e){if(t.closed||t._hasParent(this))return;t._addParent(this)}(this._finalizers=(r=this._finalizers)!==null&&r!==void 0?r:[]).push(t)}},e.prototype._hasParent=function(t){var r=this._parentage;return r===t||Array.isArray(r)&&r.includes(t)},e.prototype._addParent=function(t){var r=this._parentage;this._parentage=Array.isArray(r)?(r.push(t),r):r?[r,t]:t},e.prototype._removeParent=function(t){var r=this._parentage;r===t?this._parentage=null:Array.isArray(r)&&Ve(r,t)},e.prototype.remove=function(t){var r=this._finalizers;r&&Ve(r,t),t instanceof e&&t._removeParent(this)},e.EMPTY=function(){var t=new e;return t.closed=!0,t}(),e}();var Er=Ie.EMPTY;function Dt(e){return e instanceof Ie||e&&"closed"in e&&k(e.remove)&&k(e.add)&&k(e.unsubscribe)}function co(e){k(e)?e():e.unsubscribe()}var ke={onUnhandledError:null,onStoppedNotification:null,Promise:void 
0,useDeprecatedSynchronousErrorHandling:!1,useDeprecatedNextContext:!1};var lt={setTimeout:function(e,t){for(var r=[],o=2;o0},enumerable:!1,configurable:!0}),t.prototype._trySubscribe=function(r){return this._throwIfClosed(),e.prototype._trySubscribe.call(this,r)},t.prototype._subscribe=function(r){return this._throwIfClosed(),this._checkFinalizedStatuses(r),this._innerSubscribe(r)},t.prototype._innerSubscribe=function(r){var o=this,n=this,i=n.hasError,a=n.isStopped,s=n.observers;return i||a?Er:(this.currentObservers=null,s.push(r),new Ie(function(){o.currentObservers=null,Ve(s,r)}))},t.prototype._checkFinalizedStatuses=function(r){var o=this,n=o.hasError,i=o.thrownError,a=o.isStopped;n?r.error(i):a&&r.complete()},t.prototype.asObservable=function(){var r=new j;return r.source=this,r},t.create=function(r,o){return new vo(r,o)},t}(j);var vo=function(e){se(t,e);function t(r,o){var n=e.call(this)||this;return n.destination=r,n.source=o,n}return t.prototype.next=function(r){var o,n;(n=(o=this.destination)===null||o===void 0?void 0:o.next)===null||n===void 0||n.call(o,r)},t.prototype.error=function(r){var o,n;(n=(o=this.destination)===null||o===void 0?void 0:o.error)===null||n===void 0||n.call(o,r)},t.prototype.complete=function(){var r,o;(o=(r=this.destination)===null||r===void 0?void 0:r.complete)===null||o===void 0||o.call(r)},t.prototype._subscribe=function(r){var o,n;return(n=(o=this.source)===null||o===void 0?void 0:o.subscribe(r))!==null&&n!==void 0?n:Er},t}(g);var St={now:function(){return(St.delegate||Date).now()},delegate:void 0};var Ot=function(e){se(t,e);function t(r,o,n){r===void 0&&(r=1/0),o===void 0&&(o=1/0),n===void 0&&(n=St);var i=e.call(this)||this;return i._bufferSize=r,i._windowTime=o,i._timestampProvider=n,i._buffer=[],i._infiniteTimeWindow=!0,i._infiniteTimeWindow=o===1/0,i._bufferSize=Math.max(1,r),i._windowTime=Math.max(1,o),i}return t.prototype.next=function(r){var 
o=this,n=o.isStopped,i=o._buffer,a=o._infiniteTimeWindow,s=o._timestampProvider,c=o._windowTime;n||(i.push(r),!a&&i.push(s.now()+c)),this._trimBuffer(),e.prototype.next.call(this,r)},t.prototype._subscribe=function(r){this._throwIfClosed(),this._trimBuffer();for(var o=this._innerSubscribe(r),n=this,i=n._infiniteTimeWindow,a=n._buffer,s=a.slice(),c=0;c0?e.prototype.requestAsyncId.call(this,r,o,n):(r.actions.push(this),r._scheduled||(r._scheduled=ut.requestAnimationFrame(function(){return r.flush(void 0)})))},t.prototype.recycleAsyncId=function(r,o,n){var i;if(n===void 0&&(n=0),n!=null?n>0:this.delay>0)return e.prototype.recycleAsyncId.call(this,r,o,n);var a=r.actions;o!=null&&((i=a[a.length-1])===null||i===void 0?void 0:i.id)!==o&&(ut.cancelAnimationFrame(o),r._scheduled=void 0)},t}(zt);var yo=function(e){se(t,e);function t(){return e!==null&&e.apply(this,arguments)||this}return t.prototype.flush=function(r){this._active=!0;var o=this._scheduled;this._scheduled=void 0;var n=this.actions,i;r=r||n.shift();do if(i=r.execute(r.state,r.delay))break;while((r=n[0])&&r.id===o&&n.shift());if(this._active=!1,i){for(;(r=n[0])&&r.id===o&&n.shift();)r.unsubscribe();throw i}},t}(qt);var de=new yo(xo);var L=new j(function(e){return e.complete()});function Kt(e){return e&&k(e.schedule)}function _r(e){return e[e.length-1]}function Je(e){return k(_r(e))?e.pop():void 0}function Ae(e){return Kt(_r(e))?e.pop():void 0}function Qt(e,t){return typeof _r(e)=="number"?e.pop():t}var dt=function(e){return e&&typeof e.length=="number"&&typeof e!="function"};function Yt(e){return k(e==null?void 0:e.then)}function Bt(e){return k(e[ft])}function Gt(e){return Symbol.asyncIterator&&k(e==null?void 0:e[Symbol.asyncIterator])}function Jt(e){return new TypeError("You provided "+(e!==null&&typeof e=="object"?"an invalid object":"'"+e+"'")+" where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.")}function Di(){return typeof Symbol!="function"||!Symbol.iterator?"@@iterator":Symbol.iterator}var Xt=Di();function Zt(e){return k(e==null?void 0:e[Xt])}function er(e){return ao(this,arguments,function(){var r,o,n,i;return Ut(this,function(a){switch(a.label){case 0:r=e.getReader(),a.label=1;case 1:a.trys.push([1,,9,10]),a.label=2;case 2:return[4,ot(r.read())];case 3:return o=a.sent(),n=o.value,i=o.done,i?[4,ot(void 0)]:[3,5];case 4:return[2,a.sent()];case 5:return[4,ot(n)];case 6:return[4,a.sent()];case 7:return a.sent(),[3,2];case 8:return[3,10];case 9:return r.releaseLock(),[7];case 10:return[2]}})})}function tr(e){return k(e==null?void 0:e.getReader)}function N(e){if(e instanceof j)return e;if(e!=null){if(Bt(e))return Ni(e);if(dt(e))return Vi(e);if(Yt(e))return zi(e);if(Gt(e))return Eo(e);if(Zt(e))return qi(e);if(tr(e))return Ki(e)}throw Jt(e)}function Ni(e){return new j(function(t){var r=e[ft]();if(k(r.subscribe))return r.subscribe(t);throw new TypeError("Provided object does not correctly implement Symbol.observable")})}function Vi(e){return new j(function(t){for(var r=0;r=2;return function(o){return o.pipe(e?b(function(n,i){return e(n,i,o)}):ce,ye(1),r?Qe(t):jo(function(){return new or}))}}function $r(e){return e<=0?function(){return L}:x(function(t,r){var o=[];t.subscribe(S(r,function(n){o.push(n),e=2,!0))}function le(e){e===void 0&&(e={});var t=e.connector,r=t===void 0?function(){return new g}:t,o=e.resetOnError,n=o===void 0?!0:o,i=e.resetOnComplete,a=i===void 0?!0:i,s=e.resetOnRefCountZero,c=s===void 0?!0:s;return function(p){var l,f,u,h=0,w=!1,A=!1,Z=function(){f==null||f.unsubscribe(),f=void 0},te=function(){Z(),l=u=void 0,w=A=!1},J=function(){var C=l;te(),C==null||C.unsubscribe()};return x(function(C,ct){h++,!A&&!w&&Z();var Ne=u=u!=null?u:r();ct.add(function(){h--,h===0&&!A&&!w&&(f=Pr(J,c))}),Ne.subscribe(ct),!l&&h>0&&(l=new it({next:function(Pe){return 
Ne.next(Pe)},error:function(Pe){A=!0,Z(),f=Pr(te,n,Pe),Ne.error(Pe)},complete:function(){w=!0,Z(),f=Pr(te,a),Ne.complete()}}),N(C).subscribe(l))})(p)}}function Pr(e,t){for(var r=[],o=2;oe.next(document)),e}function R(e,t=document){return Array.from(t.querySelectorAll(e))}function P(e,t=document){let r=me(e,t);if(typeof r=="undefined")throw new ReferenceError(`Missing element: expected "${e}" to be present`);return r}function me(e,t=document){return t.querySelector(e)||void 0}function Re(){var e,t,r,o;return(o=(r=(t=(e=document.activeElement)==null?void 0:e.shadowRoot)==null?void 0:t.activeElement)!=null?r:document.activeElement)!=null?o:void 0}var la=T(d(document.body,"focusin"),d(document.body,"focusout")).pipe(be(1),q(void 0),m(()=>Re()||document.body),B(1));function vt(e){return la.pipe(m(t=>e.contains(t)),Y())}function Vo(e,t){return T(d(e,"mouseenter").pipe(m(()=>!0)),d(e,"mouseleave").pipe(m(()=>!1))).pipe(t?be(t):ce,q(!1))}function Ue(e){return{x:e.offsetLeft,y:e.offsetTop}}function zo(e){return T(d(window,"load"),d(window,"resize")).pipe(Me(0,de),m(()=>Ue(e)),q(Ue(e)))}function ir(e){return{x:e.scrollLeft,y:e.scrollTop}}function et(e){return T(d(e,"scroll"),d(window,"resize")).pipe(Me(0,de),m(()=>ir(e)),q(ir(e)))}function qo(e,t){if(typeof t=="string"||typeof t=="number")e.innerHTML+=t.toString();else if(t instanceof Node)e.appendChild(t);else if(Array.isArray(t))for(let r of t)qo(e,r)}function E(e,t,...r){let o=document.createElement(e);if(t)for(let n of Object.keys(t))typeof t[n]!="undefined"&&(typeof t[n]!="boolean"?o.setAttribute(n,t[n]):o.setAttribute(n,""));for(let n of r)qo(o,n);return o}function ar(e){if(e>999){let t=+((e-950)%1e3>99);return`${((e+1e-6)/1e3).toFixed(t)}k`}else return e.toString()}function gt(e){let t=E("script",{src:e});return H(()=>(document.head.appendChild(t),T(d(t,"load"),d(t,"error").pipe(v(()=>Ar(()=>new ReferenceError(`Invalid script: ${e}`))))).pipe(m(()=>{}),_(()=>document.head.removeChild(t)),ye(1))))}var Ko=new 
g,ma=H(()=>typeof ResizeObserver=="undefined"?gt("https://unpkg.com/resize-observer-polyfill"):$(void 0)).pipe(m(()=>new ResizeObserver(e=>{for(let t of e)Ko.next(t)})),v(e=>T(qe,$(e)).pipe(_(()=>e.disconnect()))),B(1));function pe(e){return{width:e.offsetWidth,height:e.offsetHeight}}function Ee(e){return ma.pipe(y(t=>t.observe(e)),v(t=>Ko.pipe(b(({target:r})=>r===e),_(()=>t.unobserve(e)),m(()=>pe(e)))),q(pe(e)))}function xt(e){return{width:e.scrollWidth,height:e.scrollHeight}}function sr(e){let t=e.parentElement;for(;t&&(e.scrollWidth<=t.scrollWidth&&e.scrollHeight<=t.scrollHeight);)t=(e=t).parentElement;return t?e:void 0}var Qo=new g,fa=H(()=>$(new IntersectionObserver(e=>{for(let t of e)Qo.next(t)},{threshold:0}))).pipe(v(e=>T(qe,$(e)).pipe(_(()=>e.disconnect()))),B(1));function yt(e){return fa.pipe(y(t=>t.observe(e)),v(t=>Qo.pipe(b(({target:r})=>r===e),_(()=>t.unobserve(e)),m(({isIntersecting:r})=>r))))}function Yo(e,t=16){return et(e).pipe(m(({y:r})=>{let o=pe(e),n=xt(e);return r>=n.height-o.height-t}),Y())}var cr={drawer:P("[data-md-toggle=drawer]"),search:P("[data-md-toggle=search]")};function Bo(e){return cr[e].checked}function Be(e,t){cr[e].checked!==t&&cr[e].click()}function We(e){let t=cr[e];return d(t,"change").pipe(m(()=>t.checked),q(t.checked))}function ua(e,t){switch(e.constructor){case HTMLInputElement:return e.type==="radio"?/^Arrow/.test(t):!0;case HTMLSelectElement:case HTMLTextAreaElement:return!0;default:return e.isContentEditable}}function da(){return T(d(window,"compositionstart").pipe(m(()=>!0)),d(window,"compositionend").pipe(m(()=>!1))).pipe(q(!1))}function Go(){let e=d(window,"keydown").pipe(b(t=>!(t.metaKey||t.ctrlKey)),m(t=>({mode:Bo("search")?"search":"global",type:t.key,claim(){t.preventDefault(),t.stopPropagation()}})),b(({mode:t,type:r})=>{if(t==="global"){let o=Re();if(typeof o!="undefined")return!ua(o,r)}return!0}),le());return da().pipe(v(t=>t?L:e))}function ve(){return new URL(location.href)}function 
st(e,t=!1){if(G("navigation.instant")&&!t){let r=E("a",{href:e.href});document.body.appendChild(r),r.click(),r.remove()}else location.href=e.href}function Jo(){return new g}function Xo(){return location.hash.slice(1)}function Zo(e){let t=E("a",{href:e});t.addEventListener("click",r=>r.stopPropagation()),t.click()}function ha(e){return T(d(window,"hashchange"),e).pipe(m(Xo),q(Xo()),b(t=>t.length>0),B(1))}function en(e){return ha(e).pipe(m(t=>me(`[id="${t}"]`)),b(t=>typeof t!="undefined"))}function At(e){let t=matchMedia(e);return nr(r=>t.addListener(()=>r(t.matches))).pipe(q(t.matches))}function tn(){let e=matchMedia("print");return T(d(window,"beforeprint").pipe(m(()=>!0)),d(window,"afterprint").pipe(m(()=>!1))).pipe(q(e.matches))}function Ur(e,t){return e.pipe(v(r=>r?t():L))}function Wr(e,t){return new j(r=>{let o=new XMLHttpRequest;return o.open("GET",`${e}`),o.responseType="blob",o.addEventListener("load",()=>{o.status>=200&&o.status<300?(r.next(o.response),r.complete()):r.error(new Error(o.statusText))}),o.addEventListener("error",()=>{r.error(new Error("Network error"))}),o.addEventListener("abort",()=>{r.complete()}),typeof(t==null?void 0:t.progress$)!="undefined"&&(o.addEventListener("progress",n=>{var i;if(n.lengthComputable)t.progress$.next(n.loaded/n.total*100);else{let a=(i=o.getResponseHeader("Content-Length"))!=null?i:0;t.progress$.next(n.loaded/+a*100)}}),t.progress$.next(5)),o.send(),()=>o.abort()})}function De(e,t){return Wr(e,t).pipe(v(r=>r.text()),m(r=>JSON.parse(r)),B(1))}function rn(e,t){let r=new DOMParser;return Wr(e,t).pipe(v(o=>o.text()),m(o=>r.parseFromString(o,"text/html")),B(1))}function on(e,t){let r=new DOMParser;return Wr(e,t).pipe(v(o=>o.text()),m(o=>r.parseFromString(o,"text/xml")),B(1))}function nn(){return{x:Math.max(0,scrollX),y:Math.max(0,scrollY)}}function an(){return T(d(window,"scroll",{passive:!0}),d(window,"resize",{passive:!0})).pipe(m(nn),q(nn()))}function sn(){return{width:innerWidth,height:innerHeight}}function 
cn(){return d(window,"resize",{passive:!0}).pipe(m(sn),q(sn()))}function pn(){return Q([an(),cn()]).pipe(m(([e,t])=>({offset:e,size:t})),B(1))}function pr(e,{viewport$:t,header$:r}){let o=t.pipe(X("size")),n=Q([o,r]).pipe(m(()=>Ue(e)));return Q([r,t,n]).pipe(m(([{height:i},{offset:a,size:s},{x:c,y:p}])=>({offset:{x:a.x-c,y:a.y-p+i},size:s})))}function ba(e){return d(e,"message",t=>t.data)}function va(e){let t=new g;return t.subscribe(r=>e.postMessage(r)),t}function ln(e,t=new Worker(e)){let r=ba(t),o=va(t),n=new g;n.subscribe(o);let i=o.pipe(ee(),oe(!0));return n.pipe(ee(),$e(r.pipe(U(i))),le())}var ga=P("#__config"),Et=JSON.parse(ga.textContent);Et.base=`${new URL(Et.base,ve())}`;function we(){return Et}function G(e){return Et.features.includes(e)}function ge(e,t){return typeof t!="undefined"?Et.translations[e].replace("#",t.toString()):Et.translations[e]}function Te(e,t=document){return P(`[data-md-component=${e}]`,t)}function ie(e,t=document){return R(`[data-md-component=${e}]`,t)}function xa(e){let t=P(".md-typeset > :first-child",e);return d(t,"click",{once:!0}).pipe(m(()=>P(".md-typeset",e)),m(r=>({hash:__md_hash(r.innerHTML)})))}function mn(e){if(!G("announce.dismiss")||!e.childElementCount)return L;if(!e.hidden){let t=P(".md-typeset",e);__md_hash(t.innerHTML)===__md_get("__announce")&&(e.hidden=!0)}return H(()=>{let t=new g;return t.subscribe(({hash:r})=>{e.hidden=!0,__md_set("__announce",r)}),xa(e).pipe(y(r=>t.next(r)),_(()=>t.complete()),m(r=>F({ref:e},r)))})}function ya(e,{target$:t}){return t.pipe(m(r=>({hidden:r!==e})))}function fn(e,t){let r=new g;return r.subscribe(({hidden:o})=>{e.hidden=o}),ya(e,t).pipe(y(o=>r.next(o)),_(()=>r.complete()),m(o=>F({ref:e},o)))}function Ct(e,t){return t==="inline"?E("div",{class:"md-tooltip md-tooltip--inline",id:e,role:"tooltip"},E("div",{class:"md-tooltip__inner md-typeset"})):E("div",{class:"md-tooltip",id:e,role:"tooltip"},E("div",{class:"md-tooltip__inner md-typeset"}))}function 
un(e,t){if(t=t?`${t}_annotation_${e}`:void 0,t){let r=t?`#${t}`:void 0;return E("aside",{class:"md-annotation",tabIndex:0},Ct(t),E("a",{href:r,class:"md-annotation__index",tabIndex:-1},E("span",{"data-md-annotation-id":e})))}else return E("aside",{class:"md-annotation",tabIndex:0},Ct(t),E("span",{class:"md-annotation__index",tabIndex:-1},E("span",{"data-md-annotation-id":e})))}function dn(e){return E("button",{class:"md-clipboard md-icon",title:ge("clipboard.copy"),"data-clipboard-target":`#${e} > code`})}function Dr(e,t){let r=t&2,o=t&1,n=Object.keys(e.terms).filter(c=>!e.terms[c]).reduce((c,p)=>[...c,E("del",null,p)," "],[]).slice(0,-1),i=we(),a=new URL(e.location,i.base);G("search.highlight")&&a.searchParams.set("h",Object.entries(e.terms).filter(([,c])=>c).reduce((c,[p])=>`${c} ${p}`.trim(),""));let{tags:s}=we();return E("a",{href:`${a}`,class:"md-search-result__link",tabIndex:-1},E("article",{class:"md-search-result__article md-typeset","data-md-score":e.score.toFixed(2)},r>0&&E("div",{class:"md-search-result__icon md-icon"}),r>0&&E("h1",null,e.title),r<=0&&E("h2",null,e.title),o>0&&e.text.length>0&&e.text,e.tags&&e.tags.map(c=>{let p=s?c in s?`md-tag-icon md-tag--${s[c]}`:"md-tag-icon":"";return E("span",{class:`md-tag ${p}`},c)}),o>0&&n.length>0&&E("p",{class:"md-search-result__terms"},ge("search.result.term.missing"),": ",...n)))}function hn(e){let t=e[0].score,r=[...e],o=we(),n=r.findIndex(l=>!`${new URL(l.location,o.base)}`.includes("#")),[i]=r.splice(n,1),a=r.findIndex(l=>l.scoreDr(l,1)),...c.length?[E("details",{class:"md-search-result__more"},E("summary",{tabIndex:-1},E("div",null,c.length>0&&c.length===1?ge("search.result.more.one"):ge("search.result.more.other",c.length))),...c.map(l=>Dr(l,1)))]:[]];return E("li",{class:"md-search-result__item"},p)}function bn(e){return E("ul",{class:"md-source__facts"},Object.entries(e).map(([t,r])=>E("li",{class:`md-source__fact md-source__fact--${t}`},typeof r=="number"?ar(r):r)))}function Nr(e){let 
t=`tabbed-control tabbed-control--${e}`;return E("div",{class:t,hidden:!0},E("button",{class:"tabbed-button",tabIndex:-1,"aria-hidden":"true"}))}function vn(e){return E("div",{class:"md-typeset__scrollwrap"},E("div",{class:"md-typeset__table"},e))}function Ea(e){let t=we(),r=new URL(`../${e.version}/`,t.base);return E("li",{class:"md-version__item"},E("a",{href:`${r}`,class:"md-version__link"},e.title))}function gn(e,t){return e=e.filter(r=>{var o;return!((o=r.properties)!=null&&o.hidden)}),E("div",{class:"md-version"},E("button",{class:"md-version__current","aria-label":ge("select.version")},t.title),E("ul",{class:"md-version__list"},e.map(Ea)))}var wa=0;function Ta(e,t){document.body.append(e);let{width:r}=pe(e);e.style.setProperty("--md-tooltip-width",`${r}px`),e.remove();let o=sr(t),n=typeof o!="undefined"?et(o):$({x:0,y:0}),i=T(vt(t),Vo(t)).pipe(Y());return Q([i,n]).pipe(m(([a,s])=>{let{x:c,y:p}=Ue(t),l=pe(t),f=t.closest("table");return f&&t.parentElement&&(c+=f.offsetLeft+t.parentElement.offsetLeft,p+=f.offsetTop+t.parentElement.offsetTop),{active:a,offset:{x:c-s.x+l.width/2-r/2,y:p-s.y+l.height+8}}}))}function Ge(e){let t=e.title;if(!t.length)return L;let r=`__tooltip_${wa++}`,o=Ct(r,"inline"),n=P(".md-typeset",o);return n.innerHTML=t,H(()=>{let i=new g;return 
i.subscribe({next({offset:a}){o.style.setProperty("--md-tooltip-x",`${a.x}px`),o.style.setProperty("--md-tooltip-y",`${a.y}px`)},complete(){o.style.removeProperty("--md-tooltip-x"),o.style.removeProperty("--md-tooltip-y")}}),T(i.pipe(b(({active:a})=>a)),i.pipe(be(250),b(({active:a})=>!a))).subscribe({next({active:a}){a?(e.insertAdjacentElement("afterend",o),e.setAttribute("aria-describedby",r),e.removeAttribute("title")):(o.remove(),e.removeAttribute("aria-describedby"),e.setAttribute("title",t))},complete(){o.remove(),e.removeAttribute("aria-describedby"),e.setAttribute("title",t)}}),i.pipe(Me(16,de)).subscribe(({active:a})=>{o.classList.toggle("md-tooltip--active",a)}),i.pipe(_t(125,de),b(()=>!!e.offsetParent),m(()=>e.offsetParent.getBoundingClientRect()),m(({x:a})=>a)).subscribe({next(a){a?o.style.setProperty("--md-tooltip-0",`${-a}px`):o.style.removeProperty("--md-tooltip-0")},complete(){o.style.removeProperty("--md-tooltip-0")}}),Ta(o,e).pipe(y(a=>i.next(a)),_(()=>i.complete()),m(a=>F({ref:e},a)))}).pipe(ze(ae))}function Sa(e,t){let r=H(()=>Q([zo(e),et(t)])).pipe(m(([{x:o,y:n},i])=>{let{width:a,height:s}=pe(e);return{x:o-i.x+a/2,y:n-i.y+s/2}}));return vt(e).pipe(v(o=>r.pipe(m(n=>({active:o,offset:n})),ye(+!o||1/0))))}function xn(e,t,{target$:r}){let[o,n]=Array.from(e.children);return H(()=>{let i=new g,a=i.pipe(ee(),oe(!0));return 
i.subscribe({next({offset:s}){e.style.setProperty("--md-tooltip-x",`${s.x}px`),e.style.setProperty("--md-tooltip-y",`${s.y}px`)},complete(){e.style.removeProperty("--md-tooltip-x"),e.style.removeProperty("--md-tooltip-y")}}),yt(e).pipe(U(a)).subscribe(s=>{e.toggleAttribute("data-md-visible",s)}),T(i.pipe(b(({active:s})=>s)),i.pipe(be(250),b(({active:s})=>!s))).subscribe({next({active:s}){s?e.prepend(o):o.remove()},complete(){e.prepend(o)}}),i.pipe(Me(16,de)).subscribe(({active:s})=>{o.classList.toggle("md-tooltip--active",s)}),i.pipe(_t(125,de),b(()=>!!e.offsetParent),m(()=>e.offsetParent.getBoundingClientRect()),m(({x:s})=>s)).subscribe({next(s){s?e.style.setProperty("--md-tooltip-0",`${-s}px`):e.style.removeProperty("--md-tooltip-0")},complete(){e.style.removeProperty("--md-tooltip-0")}}),d(n,"click").pipe(U(a),b(s=>!(s.metaKey||s.ctrlKey))).subscribe(s=>{s.stopPropagation(),s.preventDefault()}),d(n,"mousedown").pipe(U(a),ne(i)).subscribe(([s,{active:c}])=>{var p;if(s.button!==0||s.metaKey||s.ctrlKey)s.preventDefault();else if(c){s.preventDefault();let l=e.parentElement.closest(".md-annotation");l instanceof HTMLElement?l.focus():(p=Re())==null||p.blur()}}),r.pipe(U(a),b(s=>s===o),Ye(125)).subscribe(()=>e.focus()),Sa(e,t).pipe(y(s=>i.next(s)),_(()=>i.complete()),m(s=>F({ref:e},s)))})}function Oa(e){return e.tagName==="CODE"?R(".c, .c1, .cm",e):[e]}function Ma(e){let t=[];for(let r of Oa(e)){let o=[],n=document.createNodeIterator(r,NodeFilter.SHOW_TEXT);for(let i=n.nextNode();i;i=n.nextNode())o.push(i);for(let i of o){let a;for(;a=/(\(\d+\))(!)?/.exec(i.textContent);){let[,s,c]=a;if(typeof c=="undefined"){let p=i.splitText(a.index);i=p.splitText(s.length),t.push(p)}else{i.textContent=s,t.push(i);break}}}}return t}function yn(e,t){t.append(...Array.from(e.childNodes))}function lr(e,t,{target$:r,print$:o}){let n=t.closest("[id]"),i=n==null?void 0:n.id,a=new Map;for(let s of Ma(t)){let[,c]=s.textContent.match(/\((\d+)\)/);me(`:scope > 
li:nth-child(${c})`,e)&&(a.set(c,un(c,i)),s.replaceWith(a.get(c)))}return a.size===0?L:H(()=>{let s=new g,c=s.pipe(ee(),oe(!0)),p=[];for(let[l,f]of a)p.push([P(".md-typeset",f),P(`:scope > li:nth-child(${l})`,e)]);return o.pipe(U(c)).subscribe(l=>{e.hidden=!l,e.classList.toggle("md-annotation-list",l);for(let[f,u]of p)l?yn(f,u):yn(u,f)}),T(...[...a].map(([,l])=>xn(l,t,{target$:r}))).pipe(_(()=>s.complete()),le())})}function En(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return En(t)}}function wn(e,t){return H(()=>{let r=En(e);return typeof r!="undefined"?lr(r,e,t):L})}var Tn=jt(zr());var La=0;function Sn(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return Sn(t)}}function _a(e){return Ee(e).pipe(m(({width:t})=>({scrollable:xt(e).width>t})),X("scrollable"))}function On(e,t){let{matches:r}=matchMedia("(hover)"),o=H(()=>{let n=new g,i=n.pipe($r(1));n.subscribe(({scrollable:c})=>{c&&r?e.setAttribute("tabindex","0"):e.removeAttribute("tabindex")});let a=[];if(Tn.default.isSupported()&&(e.closest(".copy")||G("content.code.copy")&&!e.closest(".no-copy"))){let c=e.closest("pre");c.id=`__code_${La++}`;let p=dn(c.id);c.insertBefore(p,e),G("content.tooltips")&&a.push(Ge(p))}let s=e.closest(".highlight");if(s instanceof HTMLElement){let c=Sn(s);if(typeof c!="undefined"&&(s.classList.contains("annotate")||G("content.code.annotate"))){let p=lr(c,e,t);a.push(Ee(s).pipe(U(i),m(({width:l,height:f})=>l&&f),Y(),v(l=>l?p:L)))}}return _a(e).pipe(y(c=>n.next(c)),_(()=>n.complete()),m(c=>F({ref:e},c)),$e(...a))});return G("content.lazy")?yt(e).pipe(b(n=>n),ye(1),v(()=>o)):o}function Aa(e,{target$:t,print$:r}){let o=!0;return T(t.pipe(m(n=>n.closest("details:not([open])")),b(n=>e===n),m(()=>({action:"open",reveal:!0}))),r.pipe(b(n=>n||!o),y(()=>o=e.open),m(n=>({action:n?"open":"close"}))))}function Mn(e,t){return H(()=>{let r=new 
g;return r.subscribe(({action:o,reveal:n})=>{e.toggleAttribute("open",o==="open"),n&&e.scrollIntoView()}),Aa(e,t).pipe(y(o=>r.next(o)),_(()=>r.complete()),m(o=>F({ref:e},o)))})}var Ln=".node circle,.node ellipse,.node path,.node polygon,.node rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}marker{fill:var(--md-mermaid-edge-color)!important}.edgeLabel .label rect{fill:#0000}.label{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.label foreignObject{line-height:normal;overflow:visible}.label div .edgeLabel{color:var(--md-mermaid-label-fg-color)}.edgeLabel,.edgeLabel rect,.label div .edgeLabel{background-color:var(--md-mermaid-label-bg-color)}.edgeLabel,.edgeLabel rect{fill:var(--md-mermaid-label-bg-color);color:var(--md-mermaid-edge-color)}.edgePath .path,.flowchart-link{stroke:var(--md-mermaid-edge-color);stroke-width:.05rem}.edgePath .arrowheadPath{fill:var(--md-mermaid-edge-color);stroke:none}.cluster rect{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}.cluster span{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}g #flowchart-circleEnd,g #flowchart-circleStart,g #flowchart-crossEnd,g #flowchart-crossStart,g #flowchart-pointEnd,g #flowchart-pointStart{stroke:none}g.classGroup line,g.classGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.classGroup text{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.classLabel .box{fill:var(--md-mermaid-label-bg-color);background-color:var(--md-mermaid-label-bg-color);opacity:1}.classLabel .label{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node .divider{stroke:var(--md-mermaid-node-fg-color)}.relation{stroke:var(--md-mermaid-edge-color)}.cardinality{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.cardinality text{fill:inherit!important}defs 
#classDiagram-compositionEnd,defs #classDiagram-compositionStart,defs #classDiagram-dependencyEnd,defs #classDiagram-dependencyStart,defs #classDiagram-extensionEnd,defs #classDiagram-extensionStart{fill:var(--md-mermaid-edge-color)!important;stroke:var(--md-mermaid-edge-color)!important}defs #classDiagram-aggregationEnd,defs #classDiagram-aggregationStart{fill:var(--md-mermaid-label-bg-color)!important;stroke:var(--md-mermaid-edge-color)!important}g.stateGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.stateGroup .state-title{fill:var(--md-mermaid-label-fg-color)!important;font-family:var(--md-mermaid-font-family)}g.stateGroup .composit{fill:var(--md-mermaid-label-bg-color)}.nodeLabel,.nodeLabel p{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node circle.state-end,.node circle.state-start,.start-state{fill:var(--md-mermaid-edge-color);stroke:none}.end-state-inner,.end-state-outer{fill:var(--md-mermaid-edge-color)}.end-state-inner,.node circle.state-end{stroke:var(--md-mermaid-label-bg-color)}.transition{stroke:var(--md-mermaid-edge-color)}[id^=state-fork] rect,[id^=state-join] rect{fill:var(--md-mermaid-edge-color)!important;stroke:none!important}.statediagram-cluster.statediagram-cluster .inner{fill:var(--md-default-bg-color)}.statediagram-cluster rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.statediagram-state rect.divider{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}defs 
#statediagram-barbEnd{stroke:var(--md-mermaid-edge-color)}.attributeBoxEven,.attributeBoxOdd{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.entityBox{fill:var(--md-mermaid-label-bg-color);stroke:var(--md-mermaid-node-fg-color)}.entityLabel{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.relationshipLabelBox{fill:var(--md-mermaid-label-bg-color);fill-opacity:1;background-color:var(--md-mermaid-label-bg-color);opacity:1}.relationshipLabel{fill:var(--md-mermaid-label-fg-color)}.relationshipLine{stroke:var(--md-mermaid-edge-color)}defs #ONE_OR_MORE_END *,defs #ONE_OR_MORE_START *,defs #ONLY_ONE_END *,defs #ONLY_ONE_START *,defs #ZERO_OR_MORE_END *,defs #ZERO_OR_MORE_START *,defs #ZERO_OR_ONE_END *,defs #ZERO_OR_ONE_START *{stroke:var(--md-mermaid-edge-color)!important}defs #ZERO_OR_MORE_END circle,defs #ZERO_OR_MORE_START circle{fill:var(--md-mermaid-label-bg-color)}.actor{fill:var(--md-mermaid-sequence-actor-bg-color);stroke:var(--md-mermaid-sequence-actor-border-color)}text.actor>tspan{fill:var(--md-mermaid-sequence-actor-fg-color);font-family:var(--md-mermaid-font-family)}line{stroke:var(--md-mermaid-sequence-actor-line-color)}.actor-man circle,.actor-man line{fill:var(--md-mermaid-sequence-actorman-bg-color);stroke:var(--md-mermaid-sequence-actorman-line-color)}.messageLine0,.messageLine1{stroke:var(--md-mermaid-sequence-message-line-color)}.note{fill:var(--md-mermaid-sequence-note-bg-color);stroke:var(--md-mermaid-sequence-note-border-color)}.loopText,.loopText>tspan,.messageText,.noteText>tspan{stroke:none;font-family:var(--md-mermaid-font-family)!important}.messageText{fill:var(--md-mermaid-sequence-message-fg-color)}.loopText,.loopText>tspan{fill:var(--md-mermaid-sequence-loop-fg-color)}.noteText>tspan{fill:var(--md-mermaid-sequence-note-fg-color)}#arrowhead 
path{fill:var(--md-mermaid-sequence-message-line-color);stroke:none}.loopLine{fill:var(--md-mermaid-sequence-loop-bg-color);stroke:var(--md-mermaid-sequence-loop-border-color)}.labelBox{fill:var(--md-mermaid-sequence-label-bg-color);stroke:none}.labelText,.labelText>span{fill:var(--md-mermaid-sequence-label-fg-color);font-family:var(--md-mermaid-font-family)}.sequenceNumber{fill:var(--md-mermaid-sequence-number-fg-color)}rect.rect{fill:var(--md-mermaid-sequence-box-bg-color);stroke:none}rect.rect+text.text{fill:var(--md-mermaid-sequence-box-fg-color)}defs #sequencenumber{fill:var(--md-mermaid-sequence-number-bg-color)!important}";var qr,ka=0;function Ha(){return typeof mermaid=="undefined"||mermaid instanceof Element?gt("https://unpkg.com/mermaid@10.7.0/dist/mermaid.min.js"):$(void 0)}function _n(e){return e.classList.remove("mermaid"),qr||(qr=Ha().pipe(y(()=>mermaid.initialize({startOnLoad:!1,themeCSS:Ln,sequence:{actorFontSize:"16px",messageFontSize:"16px",noteFontSize:"16px"}})),m(()=>{}),B(1))),qr.subscribe(()=>ro(this,null,function*(){e.classList.add("mermaid");let t=`__mermaid_${ka++}`,r=E("div",{class:"mermaid"}),o=e.textContent,{svg:n,fn:i}=yield mermaid.render(t,o),a=r.attachShadow({mode:"closed"});a.innerHTML=n,e.replaceWith(r),i==null||i(a)})),qr.pipe(m(()=>({ref:e})))}var An=E("table");function Cn(e){return e.replaceWith(An),An.replaceWith(vn(e)),$({ref:e})}function $a(e){let t=e.find(r=>r.checked)||e[0];return T(...e.map(r=>d(r,"change").pipe(m(()=>P(`label[for="${r.id}"]`))))).pipe(q(P(`label[for="${t.id}"]`)),m(r=>({active:r})))}function kn(e,{viewport$:t,target$:r}){let o=P(".tabbed-labels",e),n=R(":scope > input",e),i=Nr("prev");e.append(i);let a=Nr("next");return e.append(a),H(()=>{let s=new g,c=s.pipe(ee(),oe(!0));Q([s,Ee(e)]).pipe(U(c),Me(1,de)).subscribe({next([{active:p},l]){let f=Ue(p),{width:u}=pe(p);e.style.setProperty("--md-indicator-x",`${f.x}px`),e.style.setProperty("--md-indicator-width",`${u}px`);let 
h=ir(o);(f.xh.x+l.width)&&o.scrollTo({left:Math.max(0,f.x-16),behavior:"smooth"})},complete(){e.style.removeProperty("--md-indicator-x"),e.style.removeProperty("--md-indicator-width")}}),Q([et(o),Ee(o)]).pipe(U(c)).subscribe(([p,l])=>{let f=xt(o);i.hidden=p.x<16,a.hidden=p.x>f.width-l.width-16}),T(d(i,"click").pipe(m(()=>-1)),d(a,"click").pipe(m(()=>1))).pipe(U(c)).subscribe(p=>{let{width:l}=pe(o);o.scrollBy({left:l*p,behavior:"smooth"})}),r.pipe(U(c),b(p=>n.includes(p))).subscribe(p=>p.click()),o.classList.add("tabbed-labels--linked");for(let p of n){let l=P(`label[for="${p.id}"]`);l.replaceChildren(E("a",{href:`#${l.htmlFor}`,tabIndex:-1},...Array.from(l.childNodes))),d(l.firstElementChild,"click").pipe(U(c),b(f=>!(f.metaKey||f.ctrlKey)),y(f=>{f.preventDefault(),f.stopPropagation()})).subscribe(()=>{history.replaceState({},"",`#${l.htmlFor}`),l.click()})}return G("content.tabs.link")&&s.pipe(Le(1),ne(t)).subscribe(([{active:p},{offset:l}])=>{let f=p.innerText.trim();if(p.hasAttribute("data-md-switching"))p.removeAttribute("data-md-switching");else{let u=e.offsetTop-l.y;for(let w of R("[data-tabs]"))for(let A of R(":scope > input",w)){let Z=P(`label[for="${A.id}"]`);if(Z!==p&&Z.innerText.trim()===f){Z.setAttribute("data-md-switching",""),A.click();break}}window.scrollTo({top:e.offsetTop-u});let h=__md_get("__tabs")||[];__md_set("__tabs",[...new Set([f,...h])])}}),s.pipe(U(c)).subscribe(()=>{for(let p of R("audio, video",e))p.pause()}),$a(n).pipe(y(p=>s.next(p)),_(()=>s.complete()),m(p=>F({ref:e},p)))}).pipe(ze(ae))}function Hn(e,{viewport$:t,target$:r,print$:o}){return T(...R(".annotate:not(.highlight)",e).map(n=>wn(n,{target$:r,print$:o})),...R("pre:not(.mermaid) > 
code",e).map(n=>On(n,{target$:r,print$:o})),...R("pre.mermaid",e).map(n=>_n(n)),...R("table:not([class])",e).map(n=>Cn(n)),...R("details",e).map(n=>Mn(n,{target$:r,print$:o})),...R("[data-tabs]",e).map(n=>kn(n,{viewport$:t,target$:r})),...R("[title]",e).filter(()=>G("content.tooltips")).map(n=>Ge(n)))}function Ra(e,{alert$:t}){return t.pipe(v(r=>T($(!0),$(!1).pipe(Ye(2e3))).pipe(m(o=>({message:r,active:o})))))}function $n(e,t){let r=P(".md-typeset",e);return H(()=>{let o=new g;return o.subscribe(({message:n,active:i})=>{e.classList.toggle("md-dialog--active",i),r.textContent=n}),Ra(e,t).pipe(y(n=>o.next(n)),_(()=>o.complete()),m(n=>F({ref:e},n)))})}function Pa({viewport$:e}){if(!G("header.autohide"))return $(!1);let t=e.pipe(m(({offset:{y:n}})=>n),Ke(2,1),m(([n,i])=>[nMath.abs(i-n.y)>100),m(([,[n]])=>n),Y()),o=We("search");return Q([e,o]).pipe(m(([{offset:n},i])=>n.y>400&&!i),Y(),v(n=>n?r:$(!1)),q(!1))}function Rn(e,t){return H(()=>Q([Ee(e),Pa(t)])).pipe(m(([{height:r},o])=>({height:r,hidden:o})),Y((r,o)=>r.height===o.height&&r.hidden===o.hidden),B(1))}function Pn(e,{header$:t,main$:r}){return H(()=>{let o=new g,n=o.pipe(ee(),oe(!0));o.pipe(X("active"),je(t)).subscribe(([{active:a},{hidden:s}])=>{e.classList.toggle("md-header--shadow",a&&!s),e.hidden=s});let i=fe(R("[title]",e)).pipe(b(()=>G("content.tooltips")),re(a=>Ge(a)));return r.subscribe(o),t.pipe(U(n),m(a=>F({ref:e},a)),$e(i.pipe(U(n))))})}function Ia(e,{viewport$:t,header$:r}){return pr(e,{viewport$:t,header$:r}).pipe(m(({offset:{y:o}})=>{let{height:n}=pe(e);return{active:o>=n}}),X("active"))}function In(e,t){return H(()=>{let r=new g;r.subscribe({next({active:n}){e.classList.toggle("md-header__title--active",n)},complete(){e.classList.remove("md-header__title--active")}});let o=me(".md-content h1");return typeof o=="undefined"?L:Ia(o,t).pipe(y(n=>r.next(n)),_(()=>r.complete()),m(n=>F({ref:e},n)))})}function Fn(e,{viewport$:t,header$:r}){let 
o=r.pipe(m(({height:i})=>i),Y()),n=o.pipe(v(()=>Ee(e).pipe(m(({height:i})=>({top:e.offsetTop,bottom:e.offsetTop+i})),X("bottom"))));return Q([o,n,t]).pipe(m(([i,{top:a,bottom:s},{offset:{y:c},size:{height:p}}])=>(p=Math.max(0,p-Math.max(0,a-c,i)-Math.max(0,p+c-s)),{offset:a-i,height:p,active:a-i<=c})),Y((i,a)=>i.offset===a.offset&&i.height===a.height&&i.active===a.active))}function Fa(e){let t=__md_get("__palette")||{index:e.findIndex(o=>matchMedia(o.getAttribute("data-md-color-media")).matches)},r=Math.max(0,Math.min(t.index,e.length-1));return $(...e).pipe(re(o=>d(o,"change").pipe(m(()=>o))),q(e[r]),m(o=>({index:e.indexOf(o),color:{media:o.getAttribute("data-md-color-media"),scheme:o.getAttribute("data-md-color-scheme"),primary:o.getAttribute("data-md-color-primary"),accent:o.getAttribute("data-md-color-accent")}})),B(1))}function jn(e){let t=R("input",e),r=E("meta",{name:"theme-color"});document.head.appendChild(r);let o=E("meta",{name:"color-scheme"});document.head.appendChild(o);let n=At("(prefers-color-scheme: light)");return H(()=>{let i=new g;return i.subscribe(a=>{if(document.body.setAttribute("data-md-color-switching",""),a.color.media==="(prefers-color-scheme)"){let s=matchMedia("(prefers-color-scheme: light)"),c=document.querySelector(s.matches?"[data-md-color-media='(prefers-color-scheme: light)']":"[data-md-color-media='(prefers-color-scheme: dark)']");a.color.scheme=c.getAttribute("data-md-color-scheme"),a.color.primary=c.getAttribute("data-md-color-primary"),a.color.accent=c.getAttribute("data-md-color-accent")}for(let[s,c]of Object.entries(a.color))document.body.setAttribute(`data-md-color-${s}`,c);for(let s=0;sa.key==="Enter"),ne(i,(a,s)=>s)).subscribe(({index:a})=>{a=(a+1)%t.length,t[a].click(),t[a].focus()}),i.pipe(m(()=>{let a=Te("header"),s=window.getComputedStyle(a);return 
o.content=s.colorScheme,s.backgroundColor.match(/\d+/g).map(c=>(+c).toString(16).padStart(2,"0")).join("")})).subscribe(a=>r.content=`#${a}`),i.pipe(Oe(ae)).subscribe(()=>{document.body.removeAttribute("data-md-color-switching")}),Fa(t).pipe(U(n.pipe(Le(1))),at(),y(a=>i.next(a)),_(()=>i.complete()),m(a=>F({ref:e},a)))})}function Un(e,{progress$:t}){return H(()=>{let r=new g;return r.subscribe(({value:o})=>{e.style.setProperty("--md-progress-value",`${o}`)}),t.pipe(y(o=>r.next({value:o})),_(()=>r.complete()),m(o=>({ref:e,value:o})))})}var Kr=jt(zr());function ja(e){e.setAttribute("data-md-copying","");let t=e.closest("[data-copy]"),r=t?t.getAttribute("data-copy"):e.innerText;return e.removeAttribute("data-md-copying"),r.trimEnd()}function Wn({alert$:e}){Kr.default.isSupported()&&new j(t=>{new Kr.default("[data-clipboard-target], [data-clipboard-text]",{text:r=>r.getAttribute("data-clipboard-text")||ja(P(r.getAttribute("data-clipboard-target")))}).on("success",r=>t.next(r))}).pipe(y(t=>{t.trigger.focus()}),m(()=>ge("clipboard.copied"))).subscribe(e)}function Dn(e,t){return e.protocol=t.protocol,e.hostname=t.hostname,e}function Ua(e,t){let r=new Map;for(let o of R("url",e)){let n=P("loc",o),i=[Dn(new URL(n.textContent),t)];r.set(`${i[0]}`,i);for(let a of R("[rel=alternate]",o)){let s=a.getAttribute("href");s!=null&&i.push(Dn(new URL(s),t))}}return r}function mr(e){return on(new URL("sitemap.xml",e)).pipe(m(t=>Ua(t,new URL(e))),he(()=>$(new Map)))}function Wa(e,t){if(!(e.target instanceof Element))return L;let r=e.target.closest("a");if(r===null)return L;if(r.target||e.metaKey||e.ctrlKey)return L;let o=new URL(r.href);return o.search=o.hash="",t.has(`${o}`)?(e.preventDefault(),$(new URL(r.href))):L}function Nn(e){let t=new Map;for(let r of R(":scope > *",e.head))t.set(r.outerHTML,r);return t}function Vn(e){for(let t of R("[href], [src]",e))for(let r of["href","src"]){let o=t.getAttribute(r);if(o&&!/^(?:[a-z]+:)?\/\//i.test(o)){t[r]=t[r];break}}return $(e)}function 
Da(e){for(let o of["[data-md-component=announce]","[data-md-component=container]","[data-md-component=header-topic]","[data-md-component=outdated]","[data-md-component=logo]","[data-md-component=skip]",...G("navigation.tabs.sticky")?["[data-md-component=tabs]"]:[]]){let n=me(o),i=me(o,e);typeof n!="undefined"&&typeof i!="undefined"&&n.replaceWith(i)}let t=Nn(document);for(let[o,n]of Nn(e))t.has(o)?t.delete(o):document.head.appendChild(n);for(let o of t.values()){let n=o.getAttribute("name");n!=="theme-color"&&n!=="color-scheme"&&o.remove()}let r=Te("container");return Fe(R("script",r)).pipe(v(o=>{let n=e.createElement("script");if(o.src){for(let i of o.getAttributeNames())n.setAttribute(i,o.getAttribute(i));return o.replaceWith(n),new j(i=>{n.onload=()=>i.complete()})}else return n.textContent=o.textContent,o.replaceWith(n),L}),ee(),oe(document))}function zn({location$:e,viewport$:t,progress$:r}){let o=we();if(location.protocol==="file:")return L;let n=mr(o.base);$(document).subscribe(Vn);let i=d(document.body,"click").pipe(je(n),v(([c,p])=>Wa(c,p)),le()),a=d(window,"popstate").pipe(m(ve),le());i.pipe(ne(t)).subscribe(([c,{offset:p}])=>{history.replaceState(p,""),history.pushState(null,"",c)}),T(i,a).subscribe(e);let s=e.pipe(X("pathname"),v(c=>rn(c,{progress$:r}).pipe(he(()=>(st(c,!0),L)))),v(Vn),v(Da),le());return T(s.pipe(ne(e,(c,p)=>p)),e.pipe(X("pathname"),v(()=>e),X("hash")),e.pipe(Y((c,p)=>c.pathname===p.pathname&&c.hash===p.hash),v(()=>i),y(()=>history.back()))).subscribe(c=>{var p,l;history.state!==null||!c.hash?window.scrollTo(0,(l=(p=history.state)==null?void 0:p.y)!=null?l:0):(history.scrollRestoration="auto",Zo(c.hash),history.scrollRestoration="manual")}),e.subscribe(()=>{history.scrollRestoration="manual"}),d(window,"beforeunload").subscribe(()=>{history.scrollRestoration="auto"}),t.pipe(X("offset"),be(100)).subscribe(({offset:c})=>{history.replaceState(c,"")}),s}var Qn=jt(Kn());function Yn(e){let 
t=e.separator.split("|").map(n=>n.replace(/(\(\?[!=<][^)]+\))/g,"").length===0?"\uFFFD":n).join("|"),r=new RegExp(t,"img"),o=(n,i,a)=>`${i}${a}`;return n=>{n=n.replace(/[\s*+\-:~^]+/g," ").trim();let i=new RegExp(`(^|${e.separator}|)(${n.replace(/[|\\{}()[\]^$+*?.-]/g,"\\$&").replace(r,"|")})`,"img");return a=>(0,Qn.default)(a).replace(i,o).replace(/<\/mark>(\s+)]*>/img,"$1")}}function Ht(e){return e.type===1}function fr(e){return e.type===3}function Bn(e,t){let r=ln(e);return T($(location.protocol!=="file:"),We("search")).pipe(He(o=>o),v(()=>t)).subscribe(({config:o,docs:n})=>r.next({type:0,data:{config:o,docs:n,options:{suggest:G("search.suggest")}}})),r}function Gn({document$:e}){let t=we(),r=De(new URL("../versions.json",t.base)).pipe(he(()=>L)),o=r.pipe(m(n=>{let[,i]=t.base.match(/([^/]+)\/?$/);return n.find(({version:a,aliases:s})=>a===i||s.includes(i))||n[0]}));r.pipe(m(n=>new Map(n.map(i=>[`${new URL(`../${i.version}/`,t.base)}`,i]))),v(n=>d(document.body,"click").pipe(b(i=>!i.metaKey&&!i.ctrlKey),ne(o),v(([i,a])=>{if(i.target instanceof Element){let s=i.target.closest("a");if(s&&!s.target&&n.has(s.href)){let c=s.href;return!i.target.closest(".md-version")&&n.get(c)===a?L:(i.preventDefault(),$(c))}}return L}),v(i=>{let{version:a}=n.get(i);return mr(new URL(i)).pipe(m(s=>{let p=ve().href.replace(t.base,"");return s.has(p.split("#")[0])?new URL(`../${a}/${p}`,t.base):new URL(i)}))})))).subscribe(n=>st(n,!0)),Q([r,o]).subscribe(([n,i])=>{P(".md-header__topic").appendChild(gn(n,i))}),e.pipe(v(()=>o)).subscribe(n=>{var a;let i=__md_get("__outdated",sessionStorage);if(i===null){i=!0;let s=((a=t.version)==null?void 0:a.default)||"latest";Array.isArray(s)||(s=[s]);e:for(let c of s)for(let p of n.aliases.concat(n.version))if(new RegExp(c,"i").test(p)){i=!1;break e}__md_set("__outdated",i,sessionStorage)}if(i)for(let s of ie("outdated"))s.hidden=!1})}function 
Ka(e,{worker$:t}){let{searchParams:r}=ve();r.has("q")&&(Be("search",!0),e.value=r.get("q"),e.focus(),We("search").pipe(He(i=>!i)).subscribe(()=>{let i=ve();i.searchParams.delete("q"),history.replaceState({},"",`${i}`)}));let o=vt(e),n=T(t.pipe(He(Ht)),d(e,"keyup"),o).pipe(m(()=>e.value),Y());return Q([n,o]).pipe(m(([i,a])=>({value:i,focus:a})),B(1))}function Jn(e,{worker$:t}){let r=new g,o=r.pipe(ee(),oe(!0));Q([t.pipe(He(Ht)),r],(i,a)=>a).pipe(X("value")).subscribe(({value:i})=>t.next({type:2,data:i})),r.pipe(X("focus")).subscribe(({focus:i})=>{i&&Be("search",i)}),d(e.form,"reset").pipe(U(o)).subscribe(()=>e.focus());let n=P("header [for=__search]");return d(n,"click").subscribe(()=>e.focus()),Ka(e,{worker$:t}).pipe(y(i=>r.next(i)),_(()=>r.complete()),m(i=>F({ref:e},i)),B(1))}function Xn(e,{worker$:t,query$:r}){let o=new g,n=Yo(e.parentElement).pipe(b(Boolean)),i=e.parentElement,a=P(":scope > :first-child",e),s=P(":scope > :last-child",e);We("search").subscribe(l=>s.setAttribute("role",l?"list":"presentation")),o.pipe(ne(r),Ir(t.pipe(He(Ht)))).subscribe(([{items:l},{value:f}])=>{switch(l.length){case 0:a.textContent=f.length?ge("search.result.none"):ge("search.result.placeholder");break;case 1:a.textContent=ge("search.result.one");break;default:let u=ar(l.length);a.textContent=ge("search.result.other",u)}});let c=o.pipe(y(()=>s.innerHTML=""),v(({items:l})=>T($(...l.slice(0,10)),$(...l.slice(10)).pipe(Ke(4),jr(n),v(([f])=>f)))),m(hn),le());return c.subscribe(l=>s.appendChild(l)),c.pipe(re(l=>{let f=me("details",l);return typeof f=="undefined"?L:d(f,"toggle").pipe(U(o),m(()=>f))})).subscribe(l=>{l.open===!1&&l.offsetTop<=i.scrollTop&&i.scrollTo({top:l.offsetTop})}),t.pipe(b(fr),m(({data:l})=>l)).pipe(y(l=>o.next(l)),_(()=>o.complete()),m(l=>F({ref:e},l)))}function Qa(e,{query$:t}){return t.pipe(m(({value:r})=>{let o=ve();return o.hash="",r=r.replace(/\s+/g,"+").replace(/&/g,"%26").replace(/=/g,"%3D"),o.search=`q=${r}`,{url:o}}))}function Zn(e,t){let r=new 
g,o=r.pipe(ee(),oe(!0));return r.subscribe(({url:n})=>{e.setAttribute("data-clipboard-text",e.href),e.href=`${n}`}),d(e,"click").pipe(U(o)).subscribe(n=>n.preventDefault()),Qa(e,t).pipe(y(n=>r.next(n)),_(()=>r.complete()),m(n=>F({ref:e},n)))}function ei(e,{worker$:t,keyboard$:r}){let o=new g,n=Te("search-query"),i=T(d(n,"keydown"),d(n,"focus")).pipe(Oe(ae),m(()=>n.value),Y());return o.pipe(je(i),m(([{suggest:s},c])=>{let p=c.split(/([\s-]+)/);if(s!=null&&s.length&&p[p.length-1]){let l=s[s.length-1];l.startsWith(p[p.length-1])&&(p[p.length-1]=l)}else p.length=0;return p})).subscribe(s=>e.innerHTML=s.join("").replace(/\s/g," ")),r.pipe(b(({mode:s})=>s==="search")).subscribe(s=>{switch(s.type){case"ArrowRight":e.innerText.length&&n.selectionStart===n.value.length&&(n.value=e.innerText);break}}),t.pipe(b(fr),m(({data:s})=>s)).pipe(y(s=>o.next(s)),_(()=>o.complete()),m(()=>({ref:e})))}function ti(e,{index$:t,keyboard$:r}){let o=we();try{let n=Bn(o.search,t),i=Te("search-query",e),a=Te("search-result",e);d(e,"click").pipe(b(({target:c})=>c instanceof Element&&!!c.closest("a"))).subscribe(()=>Be("search",!1)),r.pipe(b(({mode:c})=>c==="search")).subscribe(c=>{let p=Re();switch(c.type){case"Enter":if(p===i){let l=new Map;for(let f of R(":first-child [href]",a)){let u=f.firstElementChild;l.set(f,parseFloat(u.getAttribute("data-md-score")))}if(l.size){let[[f]]=[...l].sort(([,u],[,h])=>h-u);f.click()}c.claim()}break;case"Escape":case"Tab":Be("search",!1),i.blur();break;case"ArrowUp":case"ArrowDown":if(typeof p=="undefined")i.focus();else{let l=[i,...R(":not(details) > [href], summary, details[open] [href]",a)],f=Math.max(0,(Math.max(0,l.indexOf(p))+l.length+(c.type==="ArrowUp"?-1:1))%l.length);l[f].focus()}c.claim();break;default:i!==Re()&&i.focus()}}),r.pipe(b(({mode:c})=>c==="global")).subscribe(c=>{switch(c.type){case"f":case"s":case"/":i.focus(),i.select(),c.claim();break}});let s=Jn(i,{worker$:n});return 
T(s,Xn(a,{worker$:n,query$:s})).pipe($e(...ie("search-share",e).map(c=>Zn(c,{query$:s})),...ie("search-suggest",e).map(c=>ei(c,{worker$:n,keyboard$:r}))))}catch(n){return e.hidden=!0,qe}}function ri(e,{index$:t,location$:r}){return Q([t,r.pipe(q(ve()),b(o=>!!o.searchParams.get("h")))]).pipe(m(([o,n])=>Yn(o.config)(n.searchParams.get("h"))),m(o=>{var a;let n=new Map,i=document.createNodeIterator(e,NodeFilter.SHOW_TEXT);for(let s=i.nextNode();s;s=i.nextNode())if((a=s.parentElement)!=null&&a.offsetHeight){let c=s.textContent,p=o(c);p.length>c.length&&n.set(s,p)}for(let[s,c]of n){let{childNodes:p}=E("span",null,c);s.replaceWith(...Array.from(p))}return{ref:e,nodes:n}}))}function Ya(e,{viewport$:t,main$:r}){let o=e.closest(".md-grid"),n=o.offsetTop-o.parentElement.offsetTop;return Q([r,t]).pipe(m(([{offset:i,height:a},{offset:{y:s}}])=>(a=a+Math.min(n,Math.max(0,s-i))-n,{height:a,locked:s>=i+n})),Y((i,a)=>i.height===a.height&&i.locked===a.locked))}function Qr(e,o){var n=o,{header$:t}=n,r=to(n,["header$"]);let i=P(".md-sidebar__scrollwrap",e),{y:a}=Ue(i);return H(()=>{let s=new g,c=s.pipe(ee(),oe(!0)),p=s.pipe(Me(0,de));return p.pipe(ne(t)).subscribe({next([{height:l},{height:f}]){i.style.height=`${l-2*a}px`,e.style.top=`${f}px`},complete(){i.style.height="",e.style.top=""}}),p.pipe(He()).subscribe(()=>{for(let l of R(".md-nav__link--active[href]",e)){if(!l.clientHeight)continue;let f=l.closest(".md-sidebar__scrollwrap");if(typeof f!="undefined"){let u=l.offsetTop-f.offsetTop,{height:h}=pe(f);f.scrollTo({top:u-h/2})}}}),fe(R("label[tabindex]",e)).pipe(re(l=>d(l,"click").pipe(Oe(ae),m(()=>l),U(c)))).subscribe(l=>{let f=P(`[id="${l.htmlFor}"]`);P(`[aria-labelledby="${l.id}"]`).setAttribute("aria-expanded",`${f.checked}`)}),Ya(e,r).pipe(y(l=>s.next(l)),_(()=>s.complete()),m(l=>F({ref:e},l)))})}function oi(e,t){if(typeof t!="undefined"){let r=`https://api.github.com/repos/${e}/${t}`;return 
Lt(De(`${r}/releases/latest`).pipe(he(()=>L),m(o=>({version:o.tag_name})),Qe({})),De(r).pipe(he(()=>L),m(o=>({stars:o.stargazers_count,forks:o.forks_count})),Qe({}))).pipe(m(([o,n])=>F(F({},o),n)))}else{let r=`https://api.github.com/users/${e}`;return De(r).pipe(m(o=>({repositories:o.public_repos})),Qe({}))}}function ni(e,t){let r=`https://${e}/api/v4/projects/${encodeURIComponent(t)}`;return De(r).pipe(he(()=>L),m(({star_count:o,forks_count:n})=>({stars:o,forks:n})),Qe({}))}function ii(e){let t=e.match(/^.+github\.com\/([^/]+)\/?([^/]+)?/i);if(t){let[,r,o]=t;return oi(r,o)}if(t=e.match(/^.+?([^/]*gitlab[^/]+)\/(.+?)\/?$/i),t){let[,r,o]=t;return ni(r,o)}return L}var Ba;function Ga(e){return Ba||(Ba=H(()=>{let t=__md_get("__source",sessionStorage);if(t)return $(t);if(ie("consent").length){let o=__md_get("__consent");if(!(o&&o.github))return L}return ii(e.href).pipe(y(o=>__md_set("__source",o,sessionStorage)))}).pipe(he(()=>L),b(t=>Object.keys(t).length>0),m(t=>({facts:t})),B(1)))}function ai(e){let t=P(":scope > :last-child",e);return H(()=>{let r=new g;return r.subscribe(({facts:o})=>{t.appendChild(bn(o)),t.classList.add("md-source__repository--active")}),Ga(e).pipe(y(o=>r.next(o)),_(()=>r.complete()),m(o=>F({ref:e},o)))})}function Ja(e,{viewport$:t,header$:r}){return Ee(document.body).pipe(v(()=>pr(e,{header$:r,viewport$:t})),m(({offset:{y:o}})=>({hidden:o>=10})),X("hidden"))}function si(e,t){return H(()=>{let r=new g;return r.subscribe({next({hidden:o}){e.hidden=o},complete(){e.hidden=!1}}),(G("navigation.tabs.sticky")?$({hidden:!1}):Ja(e,t)).pipe(y(o=>r.next(o)),_(()=>r.complete()),m(o=>F({ref:e},o)))})}function Xa(e,{viewport$:t,header$:r}){let o=new Map,n=R(".md-nav__link",e);for(let s of n){let c=decodeURIComponent(s.hash.substring(1)),p=me(`[id="${c}"]`);typeof p!="undefined"&&o.set(s,p)}let i=r.pipe(X("height"),m(({height:s})=>{let c=Te("main"),p=P(":scope > :first-child",c);return s+.8*(p.offsetTop-c.offsetTop)}),le());return 
Ee(document.body).pipe(X("height"),v(s=>H(()=>{let c=[];return $([...o].reduce((p,[l,f])=>{for(;c.length&&o.get(c[c.length-1]).tagName>=f.tagName;)c.pop();let u=f.offsetTop;for(;!u&&f.parentElement;)f=f.parentElement,u=f.offsetTop;let h=f.offsetParent;for(;h;h=h.offsetParent)u+=h.offsetTop;return p.set([...c=[...c,l]].reverse(),u)},new Map))}).pipe(m(c=>new Map([...c].sort(([,p],[,l])=>p-l))),je(i),v(([c,p])=>t.pipe(Rr(([l,f],{offset:{y:u},size:h})=>{let w=u+h.height>=Math.floor(s.height);for(;f.length;){let[,A]=f[0];if(A-p=u&&!w)f=[l.pop(),...f];else break}return[l,f]},[[],[...c]]),Y((l,f)=>l[0]===f[0]&&l[1]===f[1])))))).pipe(m(([s,c])=>({prev:s.map(([p])=>p),next:c.map(([p])=>p)})),q({prev:[],next:[]}),Ke(2,1),m(([s,c])=>s.prev.length{let i=new g,a=i.pipe(ee(),oe(!0));if(i.subscribe(({prev:s,next:c})=>{for(let[p]of c)p.classList.remove("md-nav__link--passed"),p.classList.remove("md-nav__link--active");for(let[p,[l]]of s.entries())l.classList.add("md-nav__link--passed"),l.classList.toggle("md-nav__link--active",p===s.length-1)}),G("toc.follow")){let s=T(t.pipe(be(1),m(()=>{})),t.pipe(be(250),m(()=>"smooth")));i.pipe(b(({prev:c})=>c.length>0),je(o.pipe(Oe(ae))),ne(s)).subscribe(([[{prev:c}],p])=>{let[l]=c[c.length-1];if(l.offsetHeight){let f=sr(l);if(typeof f!="undefined"){let u=l.offsetTop-f.offsetTop,{height:h}=pe(f);f.scrollTo({top:u-h/2,behavior:p})}}})}return G("navigation.tracking")&&t.pipe(U(a),X("offset"),be(250),Le(1),U(n.pipe(Le(1))),at({delay:250}),ne(i)).subscribe(([,{prev:s}])=>{let c=ve(),p=s[s.length-1];if(p&&p.length){let[l]=p,{hash:f}=new URL(l.href);c.hash!==f&&(c.hash=f,history.replaceState({},"",`${c}`))}else c.hash="",history.replaceState({},"",`${c}`)}),Xa(e,{viewport$:t,header$:r}).pipe(y(s=>i.next(s)),_(()=>i.complete()),m(s=>F({ref:e},s)))})}function Za(e,{viewport$:t,main$:r,target$:o}){let n=t.pipe(m(({offset:{y:a}})=>a),Ke(2,1),m(([a,s])=>a>s&&s>0),Y()),i=r.pipe(m(({active:a})=>a));return 
Q([i,n]).pipe(m(([a,s])=>!(a&&s)),Y(),U(o.pipe(Le(1))),oe(!0),at({delay:250}),m(a=>({hidden:a})))}function pi(e,{viewport$:t,header$:r,main$:o,target$:n}){let i=new g,a=i.pipe(ee(),oe(!0));return i.subscribe({next({hidden:s}){e.hidden=s,s?(e.setAttribute("tabindex","-1"),e.blur()):e.removeAttribute("tabindex")},complete(){e.style.top="",e.hidden=!0,e.removeAttribute("tabindex")}}),r.pipe(U(a),X("height")).subscribe(({height:s})=>{e.style.top=`${s+16}px`}),d(e,"click").subscribe(s=>{s.preventDefault(),window.scrollTo({top:0})}),Za(e,{viewport$:t,main$:o,target$:n}).pipe(y(s=>i.next(s)),_(()=>i.complete()),m(s=>F({ref:e},s)))}function li({document$:e}){e.pipe(v(()=>R(".md-ellipsis")),re(t=>yt(t).pipe(U(e.pipe(Le(1))),b(r=>r),m(()=>t),ye(1))),b(t=>t.offsetWidth{let r=t.innerText,o=t.closest("a")||t;return o.title=r,Ge(o).pipe(U(e.pipe(Le(1))),_(()=>o.removeAttribute("title")))})).subscribe(),e.pipe(v(()=>R(".md-status")),re(t=>Ge(t))).subscribe()}function mi({document$:e,tablet$:t}){e.pipe(v(()=>R(".md-toggle--indeterminate")),y(r=>{r.indeterminate=!0,r.checked=!1}),re(r=>d(r,"change").pipe(Fr(()=>r.classList.contains("md-toggle--indeterminate")),m(()=>r))),ne(t)).subscribe(([r,o])=>{r.classList.remove("md-toggle--indeterminate"),o&&(r.checked=!1)})}function es(){return/(iPad|iPhone|iPod)/.test(navigator.userAgent)}function fi({document$:e}){e.pipe(v(()=>R("[data-md-scrollfix]")),y(t=>t.removeAttribute("data-md-scrollfix")),b(es),re(t=>d(t,"touchstart").pipe(m(()=>t)))).subscribe(t=>{let r=t.scrollTop;r===0?t.scrollTop=1:r+t.offsetHeight===t.scrollHeight&&(t.scrollTop=r-1)})}function ui({viewport$:e,tablet$:t}){Q([We("search"),t]).pipe(m(([r,o])=>r&&!o),v(r=>$(r).pipe(Ye(r?400:100))),ne(e)).subscribe(([r,{offset:{y:o}}])=>{if(r)document.body.setAttribute("data-md-scrolllock",""),document.body.style.top=`-${o}px`;else{let 
n=-1*parseInt(document.body.style.top,10);document.body.removeAttribute("data-md-scrolllock"),document.body.style.top="",n&&window.scrollTo(0,n)}})}Object.entries||(Object.entries=function(e){let t=[];for(let r of Object.keys(e))t.push([r,e[r]]);return t});Object.values||(Object.values=function(e){let t=[];for(let r of Object.keys(e))t.push(e[r]);return t});typeof Element!="undefined"&&(Element.prototype.scrollTo||(Element.prototype.scrollTo=function(e,t){typeof e=="object"?(this.scrollLeft=e.left,this.scrollTop=e.top):(this.scrollLeft=e,this.scrollTop=t)}),Element.prototype.replaceWith||(Element.prototype.replaceWith=function(...e){let t=this.parentNode;if(t){e.length===0&&t.removeChild(this);for(let r=e.length-1;r>=0;r--){let o=e[r];typeof o=="string"?o=document.createTextNode(o):o.parentNode&&o.parentNode.removeChild(o),r?t.insertBefore(this.previousSibling,o):t.replaceChild(o,this)}}}));function ts(){return location.protocol==="file:"?gt(`${new URL("search/search_index.js",Yr.base)}`).pipe(m(()=>__index),B(1)):De(new URL("search/search_index.json",Yr.base))}document.documentElement.classList.remove("no-js");document.documentElement.classList.add("js");var rt=No(),Rt=Jo(),wt=en(Rt),Br=Go(),_e=pn(),ur=At("(min-width: 960px)"),hi=At("(min-width: 1220px)"),bi=tn(),Yr=we(),vi=document.forms.namedItem("search")?ts():qe,Gr=new g;Wn({alert$:Gr});var Jr=new g;G("navigation.instant")&&zn({location$:Rt,viewport$:_e,progress$:Jr}).subscribe(rt);var di;((di=Yr.version)==null?void 0:di.provider)==="mike"&&Gn({document$:rt});T(Rt,wt).pipe(Ye(125)).subscribe(()=>{Be("drawer",!1),Be("search",!1)});Br.pipe(b(({mode:e})=>e==="global")).subscribe(e=>{switch(e.type){case"p":case",":let t=me("link[rel=prev]");typeof t!="undefined"&&st(t);break;case"n":case".":let r=me("link[rel=next]");typeof r!="undefined"&&st(r);break;case"Enter":let o=Re();o instanceof 
HTMLLabelElement&&o.click()}});li({document$:rt});mi({document$:rt,tablet$:ur});fi({document$:rt});ui({viewport$:_e,tablet$:ur});var tt=Rn(Te("header"),{viewport$:_e}),$t=rt.pipe(m(()=>Te("main")),v(e=>Fn(e,{viewport$:_e,header$:tt})),B(1)),rs=T(...ie("consent").map(e=>fn(e,{target$:wt})),...ie("dialog").map(e=>$n(e,{alert$:Gr})),...ie("header").map(e=>Pn(e,{viewport$:_e,header$:tt,main$:$t})),...ie("palette").map(e=>jn(e)),...ie("progress").map(e=>Un(e,{progress$:Jr})),...ie("search").map(e=>ti(e,{index$:vi,keyboard$:Br})),...ie("source").map(e=>ai(e))),os=H(()=>T(...ie("announce").map(e=>mn(e)),...ie("content").map(e=>Hn(e,{viewport$:_e,target$:wt,print$:bi})),...ie("content").map(e=>G("search.highlight")?ri(e,{index$:vi,location$:Rt}):L),...ie("header-title").map(e=>In(e,{viewport$:_e,header$:tt})),...ie("sidebar").map(e=>e.getAttribute("data-md-type")==="navigation"?Ur(hi,()=>Qr(e,{viewport$:_e,header$:tt,main$:$t})):Ur(ur,()=>Qr(e,{viewport$:_e,header$:tt,main$:$t}))),...ie("tabs").map(e=>si(e,{viewport$:_e,header$:tt})),...ie("toc").map(e=>ci(e,{viewport$:_e,header$:tt,main$:$t,target$:wt})),...ie("top").map(e=>pi(e,{viewport$:_e,header$:tt,main$:$t,target$:wt})))),gi=rt.pipe(v(()=>os),$e(rs),B(1));gi.subscribe();window.document$=rt;window.location$=Rt;window.target$=wt;window.keyboard$=Br;window.viewport$=_e;window.tablet$=ur;window.screen$=hi;window.print$=bi;window.alert$=Gr;window.progress$=Jr;window.component$=gi;})(); +//# sourceMappingURL=bundle.1e8ae164.min.js.map + diff --git a/assets/javascripts/bundle.1e8ae164.min.js.map b/assets/javascripts/bundle.1e8ae164.min.js.map new file mode 100644 index 0000000..6c33b8e --- /dev/null +++ b/assets/javascripts/bundle.1e8ae164.min.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["node_modules/focus-visible/dist/focus-visible.js", "node_modules/clipboard/dist/clipboard.js", "node_modules/escape-html/index.js", "src/templates/assets/javascripts/bundle.ts", 
"node_modules/rxjs/node_modules/tslib/tslib.es6.js", "node_modules/rxjs/src/internal/util/isFunction.ts", "node_modules/rxjs/src/internal/util/createErrorClass.ts", "node_modules/rxjs/src/internal/util/UnsubscriptionError.ts", "node_modules/rxjs/src/internal/util/arrRemove.ts", "node_modules/rxjs/src/internal/Subscription.ts", "node_modules/rxjs/src/internal/config.ts", "node_modules/rxjs/src/internal/scheduler/timeoutProvider.ts", "node_modules/rxjs/src/internal/util/reportUnhandledError.ts", "node_modules/rxjs/src/internal/util/noop.ts", "node_modules/rxjs/src/internal/NotificationFactories.ts", "node_modules/rxjs/src/internal/util/errorContext.ts", "node_modules/rxjs/src/internal/Subscriber.ts", "node_modules/rxjs/src/internal/symbol/observable.ts", "node_modules/rxjs/src/internal/util/identity.ts", "node_modules/rxjs/src/internal/util/pipe.ts", "node_modules/rxjs/src/internal/Observable.ts", "node_modules/rxjs/src/internal/util/lift.ts", "node_modules/rxjs/src/internal/operators/OperatorSubscriber.ts", "node_modules/rxjs/src/internal/scheduler/animationFrameProvider.ts", "node_modules/rxjs/src/internal/util/ObjectUnsubscribedError.ts", "node_modules/rxjs/src/internal/Subject.ts", "node_modules/rxjs/src/internal/scheduler/dateTimestampProvider.ts", "node_modules/rxjs/src/internal/ReplaySubject.ts", "node_modules/rxjs/src/internal/scheduler/Action.ts", "node_modules/rxjs/src/internal/scheduler/intervalProvider.ts", "node_modules/rxjs/src/internal/scheduler/AsyncAction.ts", "node_modules/rxjs/src/internal/Scheduler.ts", "node_modules/rxjs/src/internal/scheduler/AsyncScheduler.ts", "node_modules/rxjs/src/internal/scheduler/async.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameAction.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameScheduler.ts", "node_modules/rxjs/src/internal/scheduler/animationFrame.ts", "node_modules/rxjs/src/internal/observable/empty.ts", "node_modules/rxjs/src/internal/util/isScheduler.ts", 
"node_modules/rxjs/src/internal/util/args.ts", "node_modules/rxjs/src/internal/util/isArrayLike.ts", "node_modules/rxjs/src/internal/util/isPromise.ts", "node_modules/rxjs/src/internal/util/isInteropObservable.ts", "node_modules/rxjs/src/internal/util/isAsyncIterable.ts", "node_modules/rxjs/src/internal/util/throwUnobservableError.ts", "node_modules/rxjs/src/internal/symbol/iterator.ts", "node_modules/rxjs/src/internal/util/isIterable.ts", "node_modules/rxjs/src/internal/util/isReadableStreamLike.ts", "node_modules/rxjs/src/internal/observable/innerFrom.ts", "node_modules/rxjs/src/internal/util/executeSchedule.ts", "node_modules/rxjs/src/internal/operators/observeOn.ts", "node_modules/rxjs/src/internal/operators/subscribeOn.ts", "node_modules/rxjs/src/internal/scheduled/scheduleObservable.ts", "node_modules/rxjs/src/internal/scheduled/schedulePromise.ts", "node_modules/rxjs/src/internal/scheduled/scheduleArray.ts", "node_modules/rxjs/src/internal/scheduled/scheduleIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleAsyncIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleReadableStreamLike.ts", "node_modules/rxjs/src/internal/scheduled/scheduled.ts", "node_modules/rxjs/src/internal/observable/from.ts", "node_modules/rxjs/src/internal/observable/of.ts", "node_modules/rxjs/src/internal/observable/throwError.ts", "node_modules/rxjs/src/internal/util/EmptyError.ts", "node_modules/rxjs/src/internal/util/isDate.ts", "node_modules/rxjs/src/internal/operators/map.ts", "node_modules/rxjs/src/internal/util/mapOneOrManyArgs.ts", "node_modules/rxjs/src/internal/util/argsArgArrayOrObject.ts", "node_modules/rxjs/src/internal/util/createObject.ts", "node_modules/rxjs/src/internal/observable/combineLatest.ts", "node_modules/rxjs/src/internal/operators/mergeInternals.ts", "node_modules/rxjs/src/internal/operators/mergeMap.ts", "node_modules/rxjs/src/internal/operators/mergeAll.ts", "node_modules/rxjs/src/internal/operators/concatAll.ts", 
"node_modules/rxjs/src/internal/observable/concat.ts", "node_modules/rxjs/src/internal/observable/defer.ts", "node_modules/rxjs/src/internal/observable/fromEvent.ts", "node_modules/rxjs/src/internal/observable/fromEventPattern.ts", "node_modules/rxjs/src/internal/observable/timer.ts", "node_modules/rxjs/src/internal/observable/merge.ts", "node_modules/rxjs/src/internal/observable/never.ts", "node_modules/rxjs/src/internal/util/argsOrArgArray.ts", "node_modules/rxjs/src/internal/operators/filter.ts", "node_modules/rxjs/src/internal/observable/zip.ts", "node_modules/rxjs/src/internal/operators/audit.ts", "node_modules/rxjs/src/internal/operators/auditTime.ts", "node_modules/rxjs/src/internal/operators/bufferCount.ts", "node_modules/rxjs/src/internal/operators/catchError.ts", "node_modules/rxjs/src/internal/operators/scanInternals.ts", "node_modules/rxjs/src/internal/operators/combineLatest.ts", "node_modules/rxjs/src/internal/operators/combineLatestWith.ts", "node_modules/rxjs/src/internal/operators/debounceTime.ts", "node_modules/rxjs/src/internal/operators/defaultIfEmpty.ts", "node_modules/rxjs/src/internal/operators/take.ts", "node_modules/rxjs/src/internal/operators/ignoreElements.ts", "node_modules/rxjs/src/internal/operators/mapTo.ts", "node_modules/rxjs/src/internal/operators/delayWhen.ts", "node_modules/rxjs/src/internal/operators/delay.ts", "node_modules/rxjs/src/internal/operators/distinctUntilChanged.ts", "node_modules/rxjs/src/internal/operators/distinctUntilKeyChanged.ts", "node_modules/rxjs/src/internal/operators/throwIfEmpty.ts", "node_modules/rxjs/src/internal/operators/endWith.ts", "node_modules/rxjs/src/internal/operators/finalize.ts", "node_modules/rxjs/src/internal/operators/first.ts", "node_modules/rxjs/src/internal/operators/takeLast.ts", "node_modules/rxjs/src/internal/operators/merge.ts", "node_modules/rxjs/src/internal/operators/mergeWith.ts", "node_modules/rxjs/src/internal/operators/repeat.ts", 
"node_modules/rxjs/src/internal/operators/scan.ts", "node_modules/rxjs/src/internal/operators/share.ts", "node_modules/rxjs/src/internal/operators/shareReplay.ts", "node_modules/rxjs/src/internal/operators/skip.ts", "node_modules/rxjs/src/internal/operators/skipUntil.ts", "node_modules/rxjs/src/internal/operators/startWith.ts", "node_modules/rxjs/src/internal/operators/switchMap.ts", "node_modules/rxjs/src/internal/operators/takeUntil.ts", "node_modules/rxjs/src/internal/operators/takeWhile.ts", "node_modules/rxjs/src/internal/operators/tap.ts", "node_modules/rxjs/src/internal/operators/throttle.ts", "node_modules/rxjs/src/internal/operators/throttleTime.ts", "node_modules/rxjs/src/internal/operators/withLatestFrom.ts", "node_modules/rxjs/src/internal/operators/zip.ts", "node_modules/rxjs/src/internal/operators/zipWith.ts", "src/templates/assets/javascripts/browser/document/index.ts", "src/templates/assets/javascripts/browser/element/_/index.ts", "src/templates/assets/javascripts/browser/element/focus/index.ts", "src/templates/assets/javascripts/browser/element/hover/index.ts", "src/templates/assets/javascripts/browser/element/offset/_/index.ts", "src/templates/assets/javascripts/browser/element/offset/content/index.ts", "src/templates/assets/javascripts/utilities/h/index.ts", "src/templates/assets/javascripts/utilities/round/index.ts", "src/templates/assets/javascripts/browser/script/index.ts", "src/templates/assets/javascripts/browser/element/size/_/index.ts", "src/templates/assets/javascripts/browser/element/size/content/index.ts", "src/templates/assets/javascripts/browser/element/visibility/index.ts", "src/templates/assets/javascripts/browser/toggle/index.ts", "src/templates/assets/javascripts/browser/keyboard/index.ts", "src/templates/assets/javascripts/browser/location/_/index.ts", "src/templates/assets/javascripts/browser/location/hash/index.ts", "src/templates/assets/javascripts/browser/media/index.ts", 
"src/templates/assets/javascripts/browser/request/index.ts", "src/templates/assets/javascripts/browser/viewport/offset/index.ts", "src/templates/assets/javascripts/browser/viewport/size/index.ts", "src/templates/assets/javascripts/browser/viewport/_/index.ts", "src/templates/assets/javascripts/browser/viewport/at/index.ts", "src/templates/assets/javascripts/browser/worker/index.ts", "src/templates/assets/javascripts/_/index.ts", "src/templates/assets/javascripts/components/_/index.ts", "src/templates/assets/javascripts/components/announce/index.ts", "src/templates/assets/javascripts/components/consent/index.ts", "src/templates/assets/javascripts/templates/tooltip/index.tsx", "src/templates/assets/javascripts/templates/annotation/index.tsx", "src/templates/assets/javascripts/templates/clipboard/index.tsx", "src/templates/assets/javascripts/templates/search/index.tsx", "src/templates/assets/javascripts/templates/source/index.tsx", "src/templates/assets/javascripts/templates/tabbed/index.tsx", "src/templates/assets/javascripts/templates/table/index.tsx", "src/templates/assets/javascripts/templates/version/index.tsx", "src/templates/assets/javascripts/components/tooltip/index.ts", "src/templates/assets/javascripts/components/content/annotation/_/index.ts", "src/templates/assets/javascripts/components/content/annotation/list/index.ts", "src/templates/assets/javascripts/components/content/annotation/block/index.ts", "src/templates/assets/javascripts/components/content/code/_/index.ts", "src/templates/assets/javascripts/components/content/details/index.ts", "src/templates/assets/javascripts/components/content/mermaid/index.css", "src/templates/assets/javascripts/components/content/mermaid/index.ts", "src/templates/assets/javascripts/components/content/table/index.ts", "src/templates/assets/javascripts/components/content/tabs/index.ts", "src/templates/assets/javascripts/components/content/_/index.ts", "src/templates/assets/javascripts/components/dialog/index.ts", 
"src/templates/assets/javascripts/components/header/_/index.ts", "src/templates/assets/javascripts/components/header/title/index.ts", "src/templates/assets/javascripts/components/main/index.ts", "src/templates/assets/javascripts/components/palette/index.ts", "src/templates/assets/javascripts/components/progress/index.ts", "src/templates/assets/javascripts/integrations/clipboard/index.ts", "src/templates/assets/javascripts/integrations/sitemap/index.ts", "src/templates/assets/javascripts/integrations/instant/index.ts", "src/templates/assets/javascripts/integrations/search/highlighter/index.ts", "src/templates/assets/javascripts/integrations/search/worker/message/index.ts", "src/templates/assets/javascripts/integrations/search/worker/_/index.ts", "src/templates/assets/javascripts/integrations/version/index.ts", "src/templates/assets/javascripts/components/search/query/index.ts", "src/templates/assets/javascripts/components/search/result/index.ts", "src/templates/assets/javascripts/components/search/share/index.ts", "src/templates/assets/javascripts/components/search/suggest/index.ts", "src/templates/assets/javascripts/components/search/_/index.ts", "src/templates/assets/javascripts/components/search/highlight/index.ts", "src/templates/assets/javascripts/components/sidebar/index.ts", "src/templates/assets/javascripts/components/source/facts/github/index.ts", "src/templates/assets/javascripts/components/source/facts/gitlab/index.ts", "src/templates/assets/javascripts/components/source/facts/_/index.ts", "src/templates/assets/javascripts/components/source/_/index.ts", "src/templates/assets/javascripts/components/tabs/index.ts", "src/templates/assets/javascripts/components/toc/index.ts", "src/templates/assets/javascripts/components/top/index.ts", "src/templates/assets/javascripts/patches/ellipsis/index.ts", "src/templates/assets/javascripts/patches/indeterminate/index.ts", "src/templates/assets/javascripts/patches/scrollfix/index.ts", 
"src/templates/assets/javascripts/patches/scrolllock/index.ts", "src/templates/assets/javascripts/polyfills/index.ts"], + "sourcesContent": ["(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory() :\n typeof define === 'function' && define.amd ? define(factory) :\n (factory());\n}(this, (function () { 'use strict';\n\n /**\n * Applies the :focus-visible polyfill at the given scope.\n * A scope in this case is either the top-level Document or a Shadow Root.\n *\n * @param {(Document|ShadowRoot)} scope\n * @see https://github.com/WICG/focus-visible\n */\n function applyFocusVisiblePolyfill(scope) {\n var hadKeyboardEvent = true;\n var hadFocusVisibleRecently = false;\n var hadFocusVisibleRecentlyTimeout = null;\n\n var inputTypesAllowlist = {\n text: true,\n search: true,\n url: true,\n tel: true,\n email: true,\n password: true,\n number: true,\n date: true,\n month: true,\n week: true,\n time: true,\n datetime: true,\n 'datetime-local': true\n };\n\n /**\n * Helper function for legacy browsers and iframes which sometimes focus\n * elements like document, body, and non-interactive SVG.\n * @param {Element} el\n */\n function isValidFocusTarget(el) {\n if (\n el &&\n el !== document &&\n el.nodeName !== 'HTML' &&\n el.nodeName !== 'BODY' &&\n 'classList' in el &&\n 'contains' in el.classList\n ) {\n return true;\n }\n return false;\n }\n\n /**\n * Computes whether the given element should automatically trigger the\n * `focus-visible` class being added, i.e. 
whether it should always match\n * `:focus-visible` when focused.\n * @param {Element} el\n * @return {boolean}\n */\n function focusTriggersKeyboardModality(el) {\n var type = el.type;\n var tagName = el.tagName;\n\n if (tagName === 'INPUT' && inputTypesAllowlist[type] && !el.readOnly) {\n return true;\n }\n\n if (tagName === 'TEXTAREA' && !el.readOnly) {\n return true;\n }\n\n if (el.isContentEditable) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Add the `focus-visible` class to the given element if it was not added by\n * the author.\n * @param {Element} el\n */\n function addFocusVisibleClass(el) {\n if (el.classList.contains('focus-visible')) {\n return;\n }\n el.classList.add('focus-visible');\n el.setAttribute('data-focus-visible-added', '');\n }\n\n /**\n * Remove the `focus-visible` class from the given element if it was not\n * originally added by the author.\n * @param {Element} el\n */\n function removeFocusVisibleClass(el) {\n if (!el.hasAttribute('data-focus-visible-added')) {\n return;\n }\n el.classList.remove('focus-visible');\n el.removeAttribute('data-focus-visible-added');\n }\n\n /**\n * If the most recent user interaction was via the keyboard;\n * and the key press did not include a meta, alt/option, or control key;\n * then the modality is keyboard. 
Otherwise, the modality is not keyboard.\n * Apply `focus-visible` to any current active element and keep track\n * of our keyboard modality state with `hadKeyboardEvent`.\n * @param {KeyboardEvent} e\n */\n function onKeyDown(e) {\n if (e.metaKey || e.altKey || e.ctrlKey) {\n return;\n }\n\n if (isValidFocusTarget(scope.activeElement)) {\n addFocusVisibleClass(scope.activeElement);\n }\n\n hadKeyboardEvent = true;\n }\n\n /**\n * If at any point a user clicks with a pointing device, ensure that we change\n * the modality away from keyboard.\n * This avoids the situation where a user presses a key on an already focused\n * element, and then clicks on a different element, focusing it with a\n * pointing device, while we still think we're in keyboard modality.\n * @param {Event} e\n */\n function onPointerDown(e) {\n hadKeyboardEvent = false;\n }\n\n /**\n * On `focus`, add the `focus-visible` class to the target if:\n * - the target received focus as a result of keyboard navigation, or\n * - the event target is an element that will likely require interaction\n * via the keyboard (e.g. 
a text box)\n * @param {Event} e\n */\n function onFocus(e) {\n // Prevent IE from focusing the document or HTML element.\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (hadKeyboardEvent || focusTriggersKeyboardModality(e.target)) {\n addFocusVisibleClass(e.target);\n }\n }\n\n /**\n * On `blur`, remove the `focus-visible` class from the target.\n * @param {Event} e\n */\n function onBlur(e) {\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (\n e.target.classList.contains('focus-visible') ||\n e.target.hasAttribute('data-focus-visible-added')\n ) {\n // To detect a tab/window switch, we look for a blur event followed\n // rapidly by a visibility change.\n // If we don't see a visibility change within 100ms, it's probably a\n // regular focus change.\n hadFocusVisibleRecently = true;\n window.clearTimeout(hadFocusVisibleRecentlyTimeout);\n hadFocusVisibleRecentlyTimeout = window.setTimeout(function() {\n hadFocusVisibleRecently = false;\n }, 100);\n removeFocusVisibleClass(e.target);\n }\n }\n\n /**\n * If the user changes tabs, keep track of whether or not the previously\n * focused element had .focus-visible.\n * @param {Event} e\n */\n function onVisibilityChange(e) {\n if (document.visibilityState === 'hidden') {\n // If the tab becomes active again, the browser will handle calling focus\n // on the element (Safari actually calls it twice).\n // If this tab change caused a blur on an element with focus-visible,\n // re-apply the class when the user switches back to the tab.\n if (hadFocusVisibleRecently) {\n hadKeyboardEvent = true;\n }\n addInitialPointerMoveListeners();\n }\n }\n\n /**\n * Add a group of listeners to detect usage of any pointing devices.\n * These listeners will be added when the polyfill first loads, and anytime\n * the window is blurred, so that they are active when the window regains\n * focus.\n */\n function addInitialPointerMoveListeners() {\n document.addEventListener('mousemove', onInitialPointerMove);\n 
document.addEventListener('mousedown', onInitialPointerMove);\n document.addEventListener('mouseup', onInitialPointerMove);\n document.addEventListener('pointermove', onInitialPointerMove);\n document.addEventListener('pointerdown', onInitialPointerMove);\n document.addEventListener('pointerup', onInitialPointerMove);\n document.addEventListener('touchmove', onInitialPointerMove);\n document.addEventListener('touchstart', onInitialPointerMove);\n document.addEventListener('touchend', onInitialPointerMove);\n }\n\n function removeInitialPointerMoveListeners() {\n document.removeEventListener('mousemove', onInitialPointerMove);\n document.removeEventListener('mousedown', onInitialPointerMove);\n document.removeEventListener('mouseup', onInitialPointerMove);\n document.removeEventListener('pointermove', onInitialPointerMove);\n document.removeEventListener('pointerdown', onInitialPointerMove);\n document.removeEventListener('pointerup', onInitialPointerMove);\n document.removeEventListener('touchmove', onInitialPointerMove);\n document.removeEventListener('touchstart', onInitialPointerMove);\n document.removeEventListener('touchend', onInitialPointerMove);\n }\n\n /**\n * When the polfyill first loads, assume the user is in keyboard modality.\n * If any event is received from a pointing device (e.g. mouse, pointer,\n * touch), turn off keyboard modality.\n * This accounts for situations where focus enters the page from the URL bar.\n * @param {Event} e\n */\n function onInitialPointerMove(e) {\n // Work around a Safari quirk that fires a mousemove on whenever the\n // window blurs, even if you're tabbing out of the page. \u00AF\\_(\u30C4)_/\u00AF\n if (e.target.nodeName && e.target.nodeName.toLowerCase() === 'html') {\n return;\n }\n\n hadKeyboardEvent = false;\n removeInitialPointerMoveListeners();\n }\n\n // For some kinds of state, we are interested in changes at the global scope\n // only. 
For example, global pointer input, global key presses and global\n // visibility change should affect the state at every scope:\n document.addEventListener('keydown', onKeyDown, true);\n document.addEventListener('mousedown', onPointerDown, true);\n document.addEventListener('pointerdown', onPointerDown, true);\n document.addEventListener('touchstart', onPointerDown, true);\n document.addEventListener('visibilitychange', onVisibilityChange, true);\n\n addInitialPointerMoveListeners();\n\n // For focus and blur, we specifically care about state changes in the local\n // scope. This is because focus / blur events that originate from within a\n // shadow root are not re-dispatched from the host element if it was already\n // the active element in its own scope:\n scope.addEventListener('focus', onFocus, true);\n scope.addEventListener('blur', onBlur, true);\n\n // We detect that a node is a ShadowRoot by ensuring that it is a\n // DocumentFragment and also has a host property. This check covers native\n // implementation and polyfill implementation transparently. If we only cared\n // about the native implementation, we could just check if the scope was\n // an instance of a ShadowRoot.\n if (scope.nodeType === Node.DOCUMENT_FRAGMENT_NODE && scope.host) {\n // Since a ShadowRoot is a special kind of DocumentFragment, it does not\n // have a root element to add a class to. 
So, we add this attribute to the\n // host element instead:\n scope.host.setAttribute('data-js-focus-visible', '');\n } else if (scope.nodeType === Node.DOCUMENT_NODE) {\n document.documentElement.classList.add('js-focus-visible');\n document.documentElement.setAttribute('data-js-focus-visible', '');\n }\n }\n\n // It is important to wrap all references to global window and document in\n // these checks to support server-side rendering use cases\n // @see https://github.com/WICG/focus-visible/issues/199\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n // Make the polyfill helper globally available. This can be used as a signal\n // to interested libraries that wish to coordinate with the polyfill for e.g.,\n // applying the polyfill to a shadow root:\n window.applyFocusVisiblePolyfill = applyFocusVisiblePolyfill;\n\n // Notify interested libraries of the polyfill's presence, in case the\n // polyfill was loaded lazily:\n var event;\n\n try {\n event = new CustomEvent('focus-visible-polyfill-ready');\n } catch (error) {\n // IE11 does not support using CustomEvent as a constructor directly:\n event = document.createEvent('CustomEvent');\n event.initCustomEvent('focus-visible-polyfill-ready', false, false, {});\n }\n\n window.dispatchEvent(event);\n }\n\n if (typeof document !== 'undefined') {\n // Apply the polyfill to the global document, so that no JavaScript\n // coordination is required to use the polyfill in the top-level document:\n applyFocusVisiblePolyfill(document);\n }\n\n})));\n", "/*!\n * clipboard.js v2.0.11\n * https://clipboardjs.com/\n *\n * Licensed MIT \u00A9 Zeno Rocha\n */\n(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"ClipboardJS\"] = 
factory();\n\telse\n\t\troot[\"ClipboardJS\"] = factory();\n})(this, function() {\nreturn /******/ (function() { // webpackBootstrap\n/******/ \tvar __webpack_modules__ = ({\n\n/***/ 686:\n/***/ (function(__unused_webpack_module, __webpack_exports__, __webpack_require__) {\n\n\"use strict\";\n\n// EXPORTS\n__webpack_require__.d(__webpack_exports__, {\n \"default\": function() { return /* binding */ clipboard; }\n});\n\n// EXTERNAL MODULE: ./node_modules/tiny-emitter/index.js\nvar tiny_emitter = __webpack_require__(279);\nvar tiny_emitter_default = /*#__PURE__*/__webpack_require__.n(tiny_emitter);\n// EXTERNAL MODULE: ./node_modules/good-listener/src/listen.js\nvar listen = __webpack_require__(370);\nvar listen_default = /*#__PURE__*/__webpack_require__.n(listen);\n// EXTERNAL MODULE: ./node_modules/select/src/select.js\nvar src_select = __webpack_require__(817);\nvar select_default = /*#__PURE__*/__webpack_require__.n(src_select);\n;// CONCATENATED MODULE: ./src/common/command.js\n/**\n * Executes a given operation type.\n * @param {String} type\n * @return {Boolean}\n */\nfunction command(type) {\n try {\n return document.execCommand(type);\n } catch (err) {\n return false;\n }\n}\n;// CONCATENATED MODULE: ./src/actions/cut.js\n\n\n/**\n * Cut action wrapper.\n * @param {String|HTMLElement} target\n * @return {String}\n */\n\nvar ClipboardActionCut = function ClipboardActionCut(target) {\n var selectedText = select_default()(target);\n command('cut');\n return selectedText;\n};\n\n/* harmony default export */ var actions_cut = (ClipboardActionCut);\n;// CONCATENATED MODULE: ./src/common/create-fake-element.js\n/**\n * Creates a fake textarea element with a value.\n * @param {String} value\n * @return {HTMLElement}\n */\nfunction createFakeElement(value) {\n var isRTL = document.documentElement.getAttribute('dir') === 'rtl';\n var fakeElement = document.createElement('textarea'); // Prevent zooming on iOS\n\n fakeElement.style.fontSize = '12pt'; // Reset box 
model\n\n fakeElement.style.border = '0';\n fakeElement.style.padding = '0';\n fakeElement.style.margin = '0'; // Move element out of screen horizontally\n\n fakeElement.style.position = 'absolute';\n fakeElement.style[isRTL ? 'right' : 'left'] = '-9999px'; // Move element to the same position vertically\n\n var yPosition = window.pageYOffset || document.documentElement.scrollTop;\n fakeElement.style.top = \"\".concat(yPosition, \"px\");\n fakeElement.setAttribute('readonly', '');\n fakeElement.value = value;\n return fakeElement;\n}\n;// CONCATENATED MODULE: ./src/actions/copy.js\n\n\n\n/**\n * Create fake copy action wrapper using a fake element.\n * @param {String} target\n * @param {Object} options\n * @return {String}\n */\n\nvar fakeCopyAction = function fakeCopyAction(value, options) {\n var fakeElement = createFakeElement(value);\n options.container.appendChild(fakeElement);\n var selectedText = select_default()(fakeElement);\n command('copy');\n fakeElement.remove();\n return selectedText;\n};\n/**\n * Copy action wrapper.\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @return {String}\n */\n\n\nvar ClipboardActionCopy = function ClipboardActionCopy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n var selectedText = '';\n\n if (typeof target === 'string') {\n selectedText = fakeCopyAction(target, options);\n } else if (target instanceof HTMLInputElement && !['text', 'search', 'url', 'tel', 'password'].includes(target === null || target === void 0 ? void 0 : target.type)) {\n // If input type doesn't support `setSelectionRange`. Simulate it. 
https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/setSelectionRange\n selectedText = fakeCopyAction(target.value, options);\n } else {\n selectedText = select_default()(target);\n command('copy');\n }\n\n return selectedText;\n};\n\n/* harmony default export */ var actions_copy = (ClipboardActionCopy);\n;// CONCATENATED MODULE: ./src/actions/default.js\nfunction _typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\n\n\n/**\n * Inner function which performs selection from either `text` or `target`\n * properties and then executes copy or cut operations.\n * @param {Object} options\n */\n\nvar ClipboardActionDefault = function ClipboardActionDefault() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n // Defines base properties passed from constructor.\n var _options$action = options.action,\n action = _options$action === void 0 ? 'copy' : _options$action,\n container = options.container,\n target = options.target,\n text = options.text; // Sets the `action` to be performed which can be either 'copy' or 'cut'.\n\n if (action !== 'copy' && action !== 'cut') {\n throw new Error('Invalid \"action\" value, use either \"copy\" or \"cut\"');\n } // Sets the `target` property using an element that will be have its content copied.\n\n\n if (target !== undefined) {\n if (target && _typeof(target) === 'object' && target.nodeType === 1) {\n if (action === 'copy' && target.hasAttribute('disabled')) {\n throw new Error('Invalid \"target\" attribute. 
Please use \"readonly\" instead of \"disabled\" attribute');\n }\n\n if (action === 'cut' && (target.hasAttribute('readonly') || target.hasAttribute('disabled'))) {\n throw new Error('Invalid \"target\" attribute. You can\\'t cut text from elements with \"readonly\" or \"disabled\" attributes');\n }\n } else {\n throw new Error('Invalid \"target\" value, use a valid Element');\n }\n } // Define selection strategy based on `text` property.\n\n\n if (text) {\n return actions_copy(text, {\n container: container\n });\n } // Defines which selection strategy based on `target` property.\n\n\n if (target) {\n return action === 'cut' ? actions_cut(target) : actions_copy(target, {\n container: container\n });\n }\n};\n\n/* harmony default export */ var actions_default = (ClipboardActionDefault);\n;// CONCATENATED MODULE: ./src/clipboard.js\nfunction clipboard_typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { clipboard_typeof = function _typeof(obj) { return typeof obj; }; } else { clipboard_typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
\"symbol\" : typeof obj; }; } return clipboard_typeof(obj); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (clipboard_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if 
(self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\n\n\n\n\n\n/**\n * Helper function to retrieve attribute value.\n * @param {String} suffix\n * @param {Element} element\n */\n\nfunction getAttributeValue(suffix, element) {\n var attribute = \"data-clipboard-\".concat(suffix);\n\n if (!element.hasAttribute(attribute)) {\n return;\n }\n\n return element.getAttribute(attribute);\n}\n/**\n * Base class which takes one or more elements, adds event listeners to them,\n * and instantiates a new `ClipboardAction` on each click.\n */\n\n\nvar Clipboard = /*#__PURE__*/function (_Emitter) {\n _inherits(Clipboard, _Emitter);\n\n var _super = _createSuper(Clipboard);\n\n /**\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n * @param {Object} options\n */\n function Clipboard(trigger, options) {\n var _this;\n\n _classCallCheck(this, Clipboard);\n\n _this = _super.call(this);\n\n _this.resolveOptions(options);\n\n _this.listenClick(trigger);\n\n return _this;\n }\n /**\n * Defines if attributes would be resolved using internal setter functions\n * or custom functions that were passed in the constructor.\n * @param {Object} options\n */\n\n\n _createClass(Clipboard, [{\n key: \"resolveOptions\",\n value: function resolveOptions() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : {};\n this.action = typeof options.action === 'function' ? options.action : this.defaultAction;\n this.target = typeof options.target === 'function' ? options.target : this.defaultTarget;\n this.text = typeof options.text === 'function' ? options.text : this.defaultText;\n this.container = clipboard_typeof(options.container) === 'object' ? options.container : document.body;\n }\n /**\n * Adds a click event listener to the passed trigger.\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n */\n\n }, {\n key: \"listenClick\",\n value: function listenClick(trigger) {\n var _this2 = this;\n\n this.listener = listen_default()(trigger, 'click', function (e) {\n return _this2.onClick(e);\n });\n }\n /**\n * Defines a new `ClipboardAction` on each click event.\n * @param {Event} e\n */\n\n }, {\n key: \"onClick\",\n value: function onClick(e) {\n var trigger = e.delegateTarget || e.currentTarget;\n var action = this.action(trigger) || 'copy';\n var text = actions_default({\n action: action,\n container: this.container,\n target: this.target(trigger),\n text: this.text(trigger)\n }); // Fires an event based on the copy operation result.\n\n this.emit(text ? 
'success' : 'error', {\n action: action,\n text: text,\n trigger: trigger,\n clearSelection: function clearSelection() {\n if (trigger) {\n trigger.focus();\n }\n\n window.getSelection().removeAllRanges();\n }\n });\n }\n /**\n * Default `action` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultAction\",\n value: function defaultAction(trigger) {\n return getAttributeValue('action', trigger);\n }\n /**\n * Default `target` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultTarget\",\n value: function defaultTarget(trigger) {\n var selector = getAttributeValue('target', trigger);\n\n if (selector) {\n return document.querySelector(selector);\n }\n }\n /**\n * Allow fire programmatically a copy action\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @returns Text copied.\n */\n\n }, {\n key: \"defaultText\",\n\n /**\n * Default `text` lookup function.\n * @param {Element} trigger\n */\n value: function defaultText(trigger) {\n return getAttributeValue('text', trigger);\n }\n /**\n * Destroy lifecycle.\n */\n\n }, {\n key: \"destroy\",\n value: function destroy() {\n this.listener.destroy();\n }\n }], [{\n key: \"copy\",\n value: function copy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n return actions_copy(target, options);\n }\n /**\n * Allow fire programmatically a cut action\n * @param {String|HTMLElement} target\n * @returns Text cutted.\n */\n\n }, {\n key: \"cut\",\n value: function cut(target) {\n return actions_cut(target);\n }\n /**\n * Returns the support of the given action, or all actions if no action is\n * given.\n * @param {String} [action]\n */\n\n }, {\n key: \"isSupported\",\n value: function isSupported() {\n var action = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ['copy', 'cut'];\n var actions = typeof action === 'string' ? 
[action] : action;\n var support = !!document.queryCommandSupported;\n actions.forEach(function (action) {\n support = support && !!document.queryCommandSupported(action);\n });\n return support;\n }\n }]);\n\n return Clipboard;\n}((tiny_emitter_default()));\n\n/* harmony default export */ var clipboard = (Clipboard);\n\n/***/ }),\n\n/***/ 828:\n/***/ (function(module) {\n\nvar DOCUMENT_NODE_TYPE = 9;\n\n/**\n * A polyfill for Element.matches()\n */\nif (typeof Element !== 'undefined' && !Element.prototype.matches) {\n var proto = Element.prototype;\n\n proto.matches = proto.matchesSelector ||\n proto.mozMatchesSelector ||\n proto.msMatchesSelector ||\n proto.oMatchesSelector ||\n proto.webkitMatchesSelector;\n}\n\n/**\n * Finds the closest parent that matches a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @return {Function}\n */\nfunction closest (element, selector) {\n while (element && element.nodeType !== DOCUMENT_NODE_TYPE) {\n if (typeof element.matches === 'function' &&\n element.matches(selector)) {\n return element;\n }\n element = element.parentNode;\n }\n}\n\nmodule.exports = closest;\n\n\n/***/ }),\n\n/***/ 438:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar closest = __webpack_require__(828);\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction _delegate(element, selector, type, callback, useCapture) {\n var listenerFn = listener.apply(this, arguments);\n\n element.addEventListener(type, listenerFn, useCapture);\n\n return {\n destroy: function() {\n element.removeEventListener(type, listenerFn, useCapture);\n }\n }\n}\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element|String|Array} [elements]\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} 
useCapture\n * @return {Object}\n */\nfunction delegate(elements, selector, type, callback, useCapture) {\n // Handle the regular Element usage\n if (typeof elements.addEventListener === 'function') {\n return _delegate.apply(null, arguments);\n }\n\n // Handle Element-less usage, it defaults to global delegation\n if (typeof type === 'function') {\n // Use `document` as the first parameter, then apply arguments\n // This is a short way to .unshift `arguments` without running into deoptimizations\n return _delegate.bind(null, document).apply(null, arguments);\n }\n\n // Handle Selector-based usage\n if (typeof elements === 'string') {\n elements = document.querySelectorAll(elements);\n }\n\n // Handle Array-like based usage\n return Array.prototype.map.call(elements, function (element) {\n return _delegate(element, selector, type, callback, useCapture);\n });\n}\n\n/**\n * Finds closest match and invokes callback.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Function}\n */\nfunction listener(element, selector, type, callback) {\n return function(e) {\n e.delegateTarget = closest(e.target, selector);\n\n if (e.delegateTarget) {\n callback.call(element, e);\n }\n }\n}\n\nmodule.exports = delegate;\n\n\n/***/ }),\n\n/***/ 879:\n/***/ (function(__unused_webpack_module, exports) {\n\n/**\n * Check if argument is a HTML element.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.node = function(value) {\n return value !== undefined\n && value instanceof HTMLElement\n && value.nodeType === 1;\n};\n\n/**\n * Check if argument is a list of HTML elements.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.nodeList = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return value !== undefined\n && (type === '[object NodeList]' || type === '[object HTMLCollection]')\n && ('length' in value)\n && (value.length === 0 || 
exports.node(value[0]));\n};\n\n/**\n * Check if argument is a string.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.string = function(value) {\n return typeof value === 'string'\n || value instanceof String;\n};\n\n/**\n * Check if argument is a function.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.fn = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return type === '[object Function]';\n};\n\n\n/***/ }),\n\n/***/ 370:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar is = __webpack_require__(879);\nvar delegate = __webpack_require__(438);\n\n/**\n * Validates all params and calls the right\n * listener function based on its target type.\n *\n * @param {String|HTMLElement|HTMLCollection|NodeList} target\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listen(target, type, callback) {\n if (!target && !type && !callback) {\n throw new Error('Missing required arguments');\n }\n\n if (!is.string(type)) {\n throw new TypeError('Second argument must be a String');\n }\n\n if (!is.fn(callback)) {\n throw new TypeError('Third argument must be a Function');\n }\n\n if (is.node(target)) {\n return listenNode(target, type, callback);\n }\n else if (is.nodeList(target)) {\n return listenNodeList(target, type, callback);\n }\n else if (is.string(target)) {\n return listenSelector(target, type, callback);\n }\n else {\n throw new TypeError('First argument must be a String, HTMLElement, HTMLCollection, or NodeList');\n }\n}\n\n/**\n * Adds an event listener to a HTML element\n * and returns a remove listener function.\n *\n * @param {HTMLElement} node\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNode(node, type, callback) {\n node.addEventListener(type, callback);\n\n return {\n destroy: function() {\n node.removeEventListener(type, callback);\n }\n }\n}\n\n/**\n * Add an event listener 
to a list of HTML elements\n * and returns a remove listener function.\n *\n * @param {NodeList|HTMLCollection} nodeList\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNodeList(nodeList, type, callback) {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.addEventListener(type, callback);\n });\n\n return {\n destroy: function() {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.removeEventListener(type, callback);\n });\n }\n }\n}\n\n/**\n * Add an event listener to a selector\n * and returns a remove listener function.\n *\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenSelector(selector, type, callback) {\n return delegate(document.body, selector, type, callback);\n}\n\nmodule.exports = listen;\n\n\n/***/ }),\n\n/***/ 817:\n/***/ (function(module) {\n\nfunction select(element) {\n var selectedText;\n\n if (element.nodeName === 'SELECT') {\n element.focus();\n\n selectedText = element.value;\n }\n else if (element.nodeName === 'INPUT' || element.nodeName === 'TEXTAREA') {\n var isReadOnly = element.hasAttribute('readonly');\n\n if (!isReadOnly) {\n element.setAttribute('readonly', '');\n }\n\n element.select();\n element.setSelectionRange(0, element.value.length);\n\n if (!isReadOnly) {\n element.removeAttribute('readonly');\n }\n\n selectedText = element.value;\n }\n else {\n if (element.hasAttribute('contenteditable')) {\n element.focus();\n }\n\n var selection = window.getSelection();\n var range = document.createRange();\n\n range.selectNodeContents(element);\n selection.removeAllRanges();\n selection.addRange(range);\n\n selectedText = selection.toString();\n }\n\n return selectedText;\n}\n\nmodule.exports = select;\n\n\n/***/ }),\n\n/***/ 279:\n/***/ (function(module) {\n\nfunction E () {\n // Keep this empty so it's easier to inherit from\n // (via https://github.com/lipsmack from 
https://github.com/scottcorgan/tiny-emitter/issues/3)\n}\n\nE.prototype = {\n on: function (name, callback, ctx) {\n var e = this.e || (this.e = {});\n\n (e[name] || (e[name] = [])).push({\n fn: callback,\n ctx: ctx\n });\n\n return this;\n },\n\n once: function (name, callback, ctx) {\n var self = this;\n function listener () {\n self.off(name, listener);\n callback.apply(ctx, arguments);\n };\n\n listener._ = callback\n return this.on(name, listener, ctx);\n },\n\n emit: function (name) {\n var data = [].slice.call(arguments, 1);\n var evtArr = ((this.e || (this.e = {}))[name] || []).slice();\n var i = 0;\n var len = evtArr.length;\n\n for (i; i < len; i++) {\n evtArr[i].fn.apply(evtArr[i].ctx, data);\n }\n\n return this;\n },\n\n off: function (name, callback) {\n var e = this.e || (this.e = {});\n var evts = e[name];\n var liveEvents = [];\n\n if (evts && callback) {\n for (var i = 0, len = evts.length; i < len; i++) {\n if (evts[i].fn !== callback && evts[i].fn._ !== callback)\n liveEvents.push(evts[i]);\n }\n }\n\n // Remove event from queue to prevent memory leak\n // Suggested by https://github.com/lazd\n // Ref: https://github.com/scottcorgan/tiny-emitter/commit/c6ebfaa9bc973b33d110a84a307742b7cf94c953#commitcomment-5024910\n\n (liveEvents.length)\n ? 
e[name] = liveEvents\n : delete e[name];\n\n return this;\n }\n};\n\nmodule.exports = E;\nmodule.exports.TinyEmitter = E;\n\n\n/***/ })\n\n/******/ \t});\n/************************************************************************/\n/******/ \t// The module cache\n/******/ \tvar __webpack_module_cache__ = {};\n/******/ \t\n/******/ \t// The require function\n/******/ \tfunction __webpack_require__(moduleId) {\n/******/ \t\t// Check if module is in cache\n/******/ \t\tif(__webpack_module_cache__[moduleId]) {\n/******/ \t\t\treturn __webpack_module_cache__[moduleId].exports;\n/******/ \t\t}\n/******/ \t\t// Create a new module (and put it into the cache)\n/******/ \t\tvar module = __webpack_module_cache__[moduleId] = {\n/******/ \t\t\t// no module.id needed\n/******/ \t\t\t// no module.loaded needed\n/******/ \t\t\texports: {}\n/******/ \t\t};\n/******/ \t\n/******/ \t\t// Execute the module function\n/******/ \t\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n/******/ \t\n/******/ \t\t// Return the exports of the module\n/******/ \t\treturn module.exports;\n/******/ \t}\n/******/ \t\n/************************************************************************/\n/******/ \t/* webpack/runtime/compat get default export */\n/******/ \t!function() {\n/******/ \t\t// getDefaultExport function for compatibility with non-harmony modules\n/******/ \t\t__webpack_require__.n = function(module) {\n/******/ \t\t\tvar getter = module && module.__esModule ?\n/******/ \t\t\t\tfunction() { return module['default']; } :\n/******/ \t\t\t\tfunction() { return module; };\n/******/ \t\t\t__webpack_require__.d(getter, { a: getter });\n/******/ \t\t\treturn getter;\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/define property getters */\n/******/ \t!function() {\n/******/ \t\t// define getter functions for harmony exports\n/******/ \t\t__webpack_require__.d = function(exports, definition) {\n/******/ \t\t\tfor(var key in definition) 
{\n/******/ \t\t\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n/******/ \t\t\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n/******/ \t\t\t\t}\n/******/ \t\t\t}\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/hasOwnProperty shorthand */\n/******/ \t!function() {\n/******/ \t\t__webpack_require__.o = function(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); }\n/******/ \t}();\n/******/ \t\n/************************************************************************/\n/******/ \t// module exports must be returned from runtime so entry inlining is disabled\n/******/ \t// startup\n/******/ \t// Load entry module and return exports\n/******/ \treturn __webpack_require__(686);\n/******/ })()\n.default;\n});", "/*!\n * escape-html\n * Copyright(c) 2012-2013 TJ Holowaychuk\n * Copyright(c) 2015 Andreas Lubbe\n * Copyright(c) 2015 Tiancheng \"Timothy\" Gu\n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module variables.\n * @private\n */\n\nvar matchHtmlRegExp = /[\"'&<>]/;\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = escapeHtml;\n\n/**\n * Escape special characters in the given string of html.\n *\n * @param {string} string The string to escape for inserting into HTML\n * @return {string}\n * @public\n */\n\nfunction escapeHtml(string) {\n var str = '' + string;\n var match = matchHtmlRegExp.exec(str);\n\n if (!match) {\n return str;\n }\n\n var escape;\n var html = '';\n var index = 0;\n var lastIndex = 0;\n\n for (index = match.index; index < str.length; index++) {\n switch (str.charCodeAt(index)) {\n case 34: // \"\n escape = '"';\n break;\n case 38: // &\n escape = '&';\n break;\n case 39: // '\n escape = ''';\n break;\n case 60: // <\n escape = '<';\n break;\n case 62: // >\n escape = '>';\n break;\n default:\n continue;\n }\n\n if (lastIndex !== index) {\n html += str.substring(lastIndex, index);\n }\n\n lastIndex = 
index + 1;\n html += escape;\n }\n\n return lastIndex !== index\n ? html + str.substring(lastIndex, index)\n : html;\n}\n", "/*\n * Copyright (c) 2016-2024 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport \"focus-visible\"\n\nimport {\n EMPTY,\n NEVER,\n Observable,\n Subject,\n defer,\n delay,\n filter,\n map,\n merge,\n mergeWith,\n shareReplay,\n switchMap\n} from \"rxjs\"\n\nimport { configuration, feature } from \"./_\"\nimport {\n at,\n getActiveElement,\n getOptionalElement,\n requestJSON,\n setLocation,\n setToggle,\n watchDocument,\n watchKeyboard,\n watchLocation,\n watchLocationTarget,\n watchMedia,\n watchPrint,\n watchScript,\n watchViewport\n} from \"./browser\"\nimport {\n getComponentElement,\n getComponentElements,\n mountAnnounce,\n mountBackToTop,\n mountConsent,\n mountContent,\n mountDialog,\n mountHeader,\n mountHeaderTitle,\n mountPalette,\n mountProgress,\n mountSearch,\n 
mountSearchHiglight,\n mountSidebar,\n mountSource,\n mountTableOfContents,\n mountTabs,\n watchHeader,\n watchMain\n} from \"./components\"\nimport {\n SearchIndex,\n setupClipboardJS,\n setupInstantNavigation,\n setupVersionSelector\n} from \"./integrations\"\nimport {\n patchEllipsis,\n patchIndeterminate,\n patchScrollfix,\n patchScrolllock\n} from \"./patches\"\nimport \"./polyfills\"\n\n/* ----------------------------------------------------------------------------\n * Functions - @todo refactor\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch search index\n *\n * @returns Search index observable\n */\nfunction fetchSearchIndex(): Observable {\n if (location.protocol === \"file:\") {\n return watchScript(\n `${new URL(\"search/search_index.js\", config.base)}`\n )\n .pipe(\n // @ts-ignore - @todo fix typings\n map(() => __index),\n shareReplay(1)\n )\n } else {\n return requestJSON(\n new URL(\"search/search_index.json\", config.base)\n )\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Application\n * ------------------------------------------------------------------------- */\n\n/* Yay, JavaScript is available */\ndocument.documentElement.classList.remove(\"no-js\")\ndocument.documentElement.classList.add(\"js\")\n\n/* Set up navigation observables and subjects */\nconst document$ = watchDocument()\nconst location$ = watchLocation()\nconst target$ = watchLocationTarget(location$)\nconst keyboard$ = watchKeyboard()\n\n/* Set up media observables */\nconst viewport$ = watchViewport()\nconst tablet$ = watchMedia(\"(min-width: 960px)\")\nconst screen$ = watchMedia(\"(min-width: 1220px)\")\nconst print$ = watchPrint()\n\n/* Retrieve search index, if search is enabled */\nconst config = configuration()\nconst index$ = document.forms.namedItem(\"search\")\n ? 
fetchSearchIndex()\n : NEVER\n\n/* Set up Clipboard.js integration */\nconst alert$ = new Subject()\nsetupClipboardJS({ alert$ })\n\n/* Set up progress indicator */\nconst progress$ = new Subject()\n\n/* Set up instant navigation, if enabled */\nif (feature(\"navigation.instant\"))\n setupInstantNavigation({ location$, viewport$, progress$ })\n .subscribe(document$)\n\n/* Set up version selector */\nif (config.version?.provider === \"mike\")\n setupVersionSelector({ document$ })\n\n/* Always close drawer and search on navigation */\nmerge(location$, target$)\n .pipe(\n delay(125)\n )\n .subscribe(() => {\n setToggle(\"drawer\", false)\n setToggle(\"search\", false)\n })\n\n/* Set up global keyboard handlers */\nkeyboard$\n .pipe(\n filter(({ mode }) => mode === \"global\")\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Go to previous page */\n case \"p\":\n case \",\":\n const prev = getOptionalElement(\"link[rel=prev]\")\n if (typeof prev !== \"undefined\")\n setLocation(prev)\n break\n\n /* Go to next page */\n case \"n\":\n case \".\":\n const next = getOptionalElement(\"link[rel=next]\")\n if (typeof next !== \"undefined\")\n setLocation(next)\n break\n\n /* Expand navigation, see https://bit.ly/3ZjG5io */\n case \"Enter\":\n const active = getActiveElement()\n if (active instanceof HTMLLabelElement)\n active.click()\n }\n })\n\n/* Set up patches */\npatchEllipsis({ document$ })\npatchIndeterminate({ document$, tablet$ })\npatchScrollfix({ document$ })\npatchScrolllock({ viewport$, tablet$ })\n\n/* Set up header and main area observable */\nconst header$ = watchHeader(getComponentElement(\"header\"), { viewport$ })\nconst main$ = document$\n .pipe(\n map(() => getComponentElement(\"main\")),\n switchMap(el => watchMain(el, { viewport$, header$ })),\n shareReplay(1)\n )\n\n/* Set up control component observables */\nconst control$ = merge(\n\n /* Consent */\n ...getComponentElements(\"consent\")\n .map(el => mountConsent(el, { target$ })),\n\n /* Dialog 
*/\n ...getComponentElements(\"dialog\")\n .map(el => mountDialog(el, { alert$ })),\n\n /* Header */\n ...getComponentElements(\"header\")\n .map(el => mountHeader(el, { viewport$, header$, main$ })),\n\n /* Color palette */\n ...getComponentElements(\"palette\")\n .map(el => mountPalette(el)),\n\n /* Progress bar */\n ...getComponentElements(\"progress\")\n .map(el => mountProgress(el, { progress$ })),\n\n /* Search */\n ...getComponentElements(\"search\")\n .map(el => mountSearch(el, { index$, keyboard$ })),\n\n /* Repository information */\n ...getComponentElements(\"source\")\n .map(el => mountSource(el))\n)\n\n/* Set up content component observables */\nconst content$ = defer(() => merge(\n\n /* Announcement bar */\n ...getComponentElements(\"announce\")\n .map(el => mountAnnounce(el)),\n\n /* Content */\n ...getComponentElements(\"content\")\n .map(el => mountContent(el, { viewport$, target$, print$ })),\n\n /* Search highlighting */\n ...getComponentElements(\"content\")\n .map(el => feature(\"search.highlight\")\n ? mountSearchHiglight(el, { index$, location$ })\n : EMPTY\n ),\n\n /* Header title */\n ...getComponentElements(\"header-title\")\n .map(el => mountHeaderTitle(el, { viewport$, header$ })),\n\n /* Sidebar */\n ...getComponentElements(\"sidebar\")\n .map(el => el.getAttribute(\"data-md-type\") === \"navigation\"\n ? 
at(screen$, () => mountSidebar(el, { viewport$, header$, main$ }))\n : at(tablet$, () => mountSidebar(el, { viewport$, header$, main$ }))\n ),\n\n /* Navigation tabs */\n ...getComponentElements(\"tabs\")\n .map(el => mountTabs(el, { viewport$, header$ })),\n\n /* Table of contents */\n ...getComponentElements(\"toc\")\n .map(el => mountTableOfContents(el, {\n viewport$, header$, main$, target$\n })),\n\n /* Back-to-top button */\n ...getComponentElements(\"top\")\n .map(el => mountBackToTop(el, { viewport$, header$, main$, target$ }))\n))\n\n/* Set up component observables */\nconst component$ = document$\n .pipe(\n switchMap(() => content$),\n mergeWith(control$),\n shareReplay(1)\n )\n\n/* Subscribe to all components */\ncomponent$.subscribe()\n\n/* ----------------------------------------------------------------------------\n * Exports\n * ------------------------------------------------------------------------- */\n\nwindow.document$ = document$ /* Document observable */\nwindow.location$ = location$ /* Location subject */\nwindow.target$ = target$ /* Location target observable */\nwindow.keyboard$ = keyboard$ /* Keyboard observable */\nwindow.viewport$ = viewport$ /* Viewport observable */\nwindow.tablet$ = tablet$ /* Media tablet observable */\nwindow.screen$ = screen$ /* Media screen observable */\nwindow.print$ = print$ /* Media print observable */\nwindow.alert$ = alert$ /* Alert subject */\nwindow.progress$ = progress$ /* Progress indicator subject */\nwindow.component$ = component$ /* Component observable */\n", "/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from, pack) {\r\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\r\n if (ar || !(i in from)) {\r\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\r\n ar[i] = from[i];\r\n }\r\n }\r\n return to.concat(ar || Array.prototype.slice.call(from));\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? 
(this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\r\n}\r\n", "/**\n * Returns true if the object is a function.\n * @param value The value to check\n */\nexport function isFunction(value: any): value is (...args: any[]) => any {\n return typeof value === 'function';\n}\n", "/**\n * Used to create Error subclasses until the community moves away from ES5.\n *\n * This is because compiling from TypeScript down to ES5 has issues with subclassing Errors\n * as well as other built-in types: https://github.com/Microsoft/TypeScript/issues/12123\n *\n * @param createImpl A factory function to create the actual constructor implementation. The returned\n * function should be a named function that calls `_super` internally.\n */\nexport function createErrorClass(createImpl: (_super: any) => any): T {\n const _super = (instance: any) => {\n Error.call(instance);\n instance.stack = new Error().stack;\n };\n\n const ctorFunc = createImpl(_super);\n ctorFunc.prototype = Object.create(Error.prototype);\n ctorFunc.prototype.constructor = ctorFunc;\n return ctorFunc;\n}\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface UnsubscriptionError extends Error {\n readonly errors: any[];\n}\n\nexport interface UnsubscriptionErrorCtor {\n /**\n * @deprecated Internal implementation detail. 
Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (errors: any[]): UnsubscriptionError;\n}\n\n/**\n * An error thrown when one or more errors have occurred during the\n * `unsubscribe` of a {@link Subscription}.\n */\nexport const UnsubscriptionError: UnsubscriptionErrorCtor = createErrorClass(\n (_super) =>\n function UnsubscriptionErrorImpl(this: any, errors: (Error | string)[]) {\n _super(this);\n this.message = errors\n ? `${errors.length} errors occurred during unsubscription:\n${errors.map((err, i) => `${i + 1}) ${err.toString()}`).join('\\n ')}`\n : '';\n this.name = 'UnsubscriptionError';\n this.errors = errors;\n }\n);\n", "/**\n * Removes an item from an array, mutating it.\n * @param arr The array to remove the item from\n * @param item The item to remove\n */\nexport function arrRemove(arr: T[] | undefined | null, item: T) {\n if (arr) {\n const index = arr.indexOf(item);\n 0 <= index && arr.splice(index, 1);\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { UnsubscriptionError } from './util/UnsubscriptionError';\nimport { SubscriptionLike, TeardownLogic, Unsubscribable } from './types';\nimport { arrRemove } from './util/arrRemove';\n\n/**\n * Represents a disposable resource, such as the execution of an Observable. 
A\n * Subscription has one important method, `unsubscribe`, that takes no argument\n * and just disposes the resource held by the subscription.\n *\n * Additionally, subscriptions may be grouped together through the `add()`\n * method, which will attach a child Subscription to the current Subscription.\n * When a Subscription is unsubscribed, all its children (and its grandchildren)\n * will be unsubscribed as well.\n *\n * @class Subscription\n */\nexport class Subscription implements SubscriptionLike {\n /** @nocollapse */\n public static EMPTY = (() => {\n const empty = new Subscription();\n empty.closed = true;\n return empty;\n })();\n\n /**\n * A flag to indicate whether this Subscription has already been unsubscribed.\n */\n public closed = false;\n\n private _parentage: Subscription[] | Subscription | null = null;\n\n /**\n * The list of registered finalizers to execute upon unsubscription. Adding and removing from this\n * list occurs in the {@link #add} and {@link #remove} methods.\n */\n private _finalizers: Exclude[] | null = null;\n\n /**\n * @param initialTeardown A function executed first as part of the finalization\n * process that is kicked off when {@link #unsubscribe} is called.\n */\n constructor(private initialTeardown?: () => void) {}\n\n /**\n * Disposes the resources held by the subscription. 
May, for instance, cancel\n * an ongoing Observable execution or cancel any other type of work that\n * started when the Subscription was created.\n * @return {void}\n */\n unsubscribe(): void {\n let errors: any[] | undefined;\n\n if (!this.closed) {\n this.closed = true;\n\n // Remove this from it's parents.\n const { _parentage } = this;\n if (_parentage) {\n this._parentage = null;\n if (Array.isArray(_parentage)) {\n for (const parent of _parentage) {\n parent.remove(this);\n }\n } else {\n _parentage.remove(this);\n }\n }\n\n const { initialTeardown: initialFinalizer } = this;\n if (isFunction(initialFinalizer)) {\n try {\n initialFinalizer();\n } catch (e) {\n errors = e instanceof UnsubscriptionError ? e.errors : [e];\n }\n }\n\n const { _finalizers } = this;\n if (_finalizers) {\n this._finalizers = null;\n for (const finalizer of _finalizers) {\n try {\n execFinalizer(finalizer);\n } catch (err) {\n errors = errors ?? [];\n if (err instanceof UnsubscriptionError) {\n errors = [...errors, ...err.errors];\n } else {\n errors.push(err);\n }\n }\n }\n }\n\n if (errors) {\n throw new UnsubscriptionError(errors);\n }\n }\n }\n\n /**\n * Adds a finalizer to this subscription, so that finalization will be unsubscribed/called\n * when this subscription is unsubscribed. If this subscription is already {@link #closed},\n * because it has already been unsubscribed, then whatever finalizer is passed to it\n * will automatically be executed (unless the finalizer itself is also a closed subscription).\n *\n * Closed Subscriptions cannot be added as finalizers to any subscription. Adding a closed\n * subscription to a any subscription will result in no operation. (A noop).\n *\n * Adding a subscription to itself, or adding `null` or `undefined` will not perform any\n * operation at all. (A noop).\n *\n * `Subscription` instances that are added to this instance will automatically remove themselves\n * if they are unsubscribed. 
Functions and {@link Unsubscribable} objects that you wish to remove\n * will need to be removed manually with {@link #remove}\n *\n * @param teardown The finalization logic to add to this subscription.\n */\n add(teardown: TeardownLogic): void {\n // Only add the finalizer if it's not undefined\n // and don't add a subscription to itself.\n if (teardown && teardown !== this) {\n if (this.closed) {\n // If this subscription is already closed,\n // execute whatever finalizer is handed to it automatically.\n execFinalizer(teardown);\n } else {\n if (teardown instanceof Subscription) {\n // We don't add closed subscriptions, and we don't add the same subscription\n // twice. Subscription unsubscribe is idempotent.\n if (teardown.closed || teardown._hasParent(this)) {\n return;\n }\n teardown._addParent(this);\n }\n (this._finalizers = this._finalizers ?? []).push(teardown);\n }\n }\n }\n\n /**\n * Checks to see if a this subscription already has a particular parent.\n * This will signal that this subscription has already been added to the parent in question.\n * @param parent the parent to check for\n */\n private _hasParent(parent: Subscription) {\n const { _parentage } = this;\n return _parentage === parent || (Array.isArray(_parentage) && _parentage.includes(parent));\n }\n\n /**\n * Adds a parent to this subscription so it can be removed from the parent if it\n * unsubscribes on it's own.\n *\n * NOTE: THIS ASSUMES THAT {@link _hasParent} HAS ALREADY BEEN CHECKED.\n * @param parent The parent subscription to add\n */\n private _addParent(parent: Subscription) {\n const { _parentage } = this;\n this._parentage = Array.isArray(_parentage) ? (_parentage.push(parent), _parentage) : _parentage ? 
[_parentage, parent] : parent;\n }\n\n /**\n * Called on a child when it is removed via {@link #remove}.\n * @param parent The parent to remove\n */\n private _removeParent(parent: Subscription) {\n const { _parentage } = this;\n if (_parentage === parent) {\n this._parentage = null;\n } else if (Array.isArray(_parentage)) {\n arrRemove(_parentage, parent);\n }\n }\n\n /**\n * Removes a finalizer from this subscription that was previously added with the {@link #add} method.\n *\n * Note that `Subscription` instances, when unsubscribed, will automatically remove themselves\n * from every other `Subscription` they have been added to. This means that using the `remove` method\n * is not a common thing and should be used thoughtfully.\n *\n * If you add the same finalizer instance of a function or an unsubscribable object to a `Subscription` instance\n * more than once, you will need to call `remove` the same number of times to remove all instances.\n *\n * All finalizer instances are removed to free up memory upon unsubscription.\n *\n * @param teardown The finalizer to remove from this subscription\n */\n remove(teardown: Exclude): void {\n const { _finalizers } = this;\n _finalizers && arrRemove(_finalizers, teardown);\n\n if (teardown instanceof Subscription) {\n teardown._removeParent(this);\n }\n }\n}\n\nexport const EMPTY_SUBSCRIPTION = Subscription.EMPTY;\n\nexport function isSubscription(value: any): value is Subscription {\n return (\n value instanceof Subscription ||\n (value && 'closed' in value && isFunction(value.remove) && isFunction(value.add) && isFunction(value.unsubscribe))\n );\n}\n\nfunction execFinalizer(finalizer: Unsubscribable | (() => void)) {\n if (isFunction(finalizer)) {\n finalizer();\n } else {\n finalizer.unsubscribe();\n }\n}\n", "import { Subscriber } from './Subscriber';\nimport { ObservableNotification } from './types';\n\n/**\n * The {@link GlobalConfig} object for RxJS. 
It is used to configure things\n * like how to react on unhandled errors.\n */\nexport const config: GlobalConfig = {\n onUnhandledError: null,\n onStoppedNotification: null,\n Promise: undefined,\n useDeprecatedSynchronousErrorHandling: false,\n useDeprecatedNextContext: false,\n};\n\n/**\n * The global configuration object for RxJS, used to configure things\n * like how to react on unhandled errors. Accessible via {@link config}\n * object.\n */\nexport interface GlobalConfig {\n /**\n * A registration point for unhandled errors from RxJS. These are errors that\n * cannot were not handled by consuming code in the usual subscription path. For\n * example, if you have this configured, and you subscribe to an observable without\n * providing an error handler, errors from that subscription will end up here. This\n * will _always_ be called asynchronously on another job in the runtime. This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onUnhandledError: ((err: any) => void) | null;\n\n /**\n * A registration point for notifications that cannot be sent to subscribers because they\n * have completed, errored or have been explicitly unsubscribed. By default, next, complete\n * and error notifications sent to stopped subscribers are noops. However, sometimes callers\n * might want a different behavior. For example, with sources that attempt to report errors\n * to stopped subscribers, a caller can configure RxJS to throw an unhandled error instead.\n * This will _always_ be called asynchronously on another job in the runtime. 
This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onStoppedNotification: ((notification: ObservableNotification, subscriber: Subscriber) => void) | null;\n\n /**\n * The promise constructor used by default for {@link Observable#toPromise toPromise} and {@link Observable#forEach forEach}\n * methods.\n *\n * @deprecated As of version 8, RxJS will no longer support this sort of injection of a\n * Promise constructor. If you need a Promise implementation other than native promises,\n * please polyfill/patch Promise as you see appropriate. Will be removed in v8.\n */\n Promise?: PromiseConstructorLike;\n\n /**\n * If true, turns on synchronous error rethrowing, which is a deprecated behavior\n * in v6 and higher. This behavior enables bad patterns like wrapping a subscribe\n * call in a try/catch block. It also enables producer interference, a nasty bug\n * where a multicast can be broken for all observers by a downstream consumer with\n * an unhandled error. DO NOT USE THIS FLAG UNLESS IT'S NEEDED TO BUY TIME\n * FOR MIGRATION REASONS.\n *\n * @deprecated As of version 8, RxJS will no longer support synchronous throwing\n * of unhandled errors. All errors will be thrown on a separate call stack to prevent bad\n * behaviors described above. 
Will be removed in v8.\n */\n useDeprecatedSynchronousErrorHandling: boolean;\n\n /**\n * If true, enables an as-of-yet undocumented feature from v5: The ability to access\n * `unsubscribe()` via `this` context in `next` functions created in observers passed\n * to `subscribe`.\n *\n * This is being removed because the performance was severely problematic, and it could also cause\n * issues when types other than POJOs are passed to subscribe as subscribers, as they will likely have\n * their `this` context overwritten.\n *\n * @deprecated As of version 8, RxJS will no longer support altering the\n * context of next functions provided as part of an observer to Subscribe. Instead,\n * you will have access to a subscription or a signal or token that will allow you to do things like\n * unsubscribe and test closed status. Will be removed in v8.\n */\n useDeprecatedNextContext: boolean;\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetTimeoutFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearTimeoutFunction = (handle: TimerHandle) => void;\n\ninterface TimeoutProvider {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n delegate:\n | {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n }\n | undefined;\n}\n\nexport const timeoutProvider: TimeoutProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setTimeout(handler: () => void, timeout?: number, ...args) {\n const { delegate } = timeoutProvider;\n if (delegate?.setTimeout) {\n return delegate.setTimeout(handler, timeout, ...args);\n }\n return setTimeout(handler, timeout, ...args);\n },\n clearTimeout(handle) {\n const { delegate } = timeoutProvider;\n return (delegate?.clearTimeout || clearTimeout)(handle as any);\n },\n delegate: undefined,\n};\n", "import { config } from '../config';\nimport { 
timeoutProvider } from '../scheduler/timeoutProvider';\n\n/**\n * Handles an error on another job either with the user-configured {@link onUnhandledError},\n * or by throwing it on that new job so it can be picked up by `window.onerror`, `process.on('error')`, etc.\n *\n * This should be called whenever there is an error that is out-of-band with the subscription\n * or when an error hits a terminal boundary of the subscription and no error handler was provided.\n *\n * @param err the error to report\n */\nexport function reportUnhandledError(err: any) {\n timeoutProvider.setTimeout(() => {\n const { onUnhandledError } = config;\n if (onUnhandledError) {\n // Execute the user-configured error handler.\n onUnhandledError(err);\n } else {\n // Throw so it is picked up by the runtime's uncaught error mechanism.\n throw err;\n }\n });\n}\n", "/* tslint:disable:no-empty */\nexport function noop() { }\n", "import { CompleteNotification, NextNotification, ErrorNotification } from './types';\n\n/**\n * A completion object optimized for memory use and created to be the\n * same \"shape\" as other notifications in v8.\n * @internal\n */\nexport const COMPLETE_NOTIFICATION = (() => createNotification('C', undefined, undefined) as CompleteNotification)();\n\n/**\n * Internal use only. Creates an optimized error notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function errorNotification(error: any): ErrorNotification {\n return createNotification('E', undefined, error) as any;\n}\n\n/**\n * Internal use only. 
Creates an optimized next notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function nextNotification(value: T) {\n return createNotification('N', value, undefined) as NextNotification;\n}\n\n/**\n * Ensures that all notifications created internally have the same \"shape\" in v8.\n *\n * TODO: This is only exported to support a crazy legacy test in `groupBy`.\n * @internal\n */\nexport function createNotification(kind: 'N' | 'E' | 'C', value: any, error: any) {\n return {\n kind,\n value,\n error,\n };\n}\n", "import { config } from '../config';\n\nlet context: { errorThrown: boolean; error: any } | null = null;\n\n/**\n * Handles dealing with errors for super-gross mode. Creates a context, in which\n * any synchronously thrown errors will be passed to {@link captureError}. Which\n * will record the error such that it will be rethrown after the call back is complete.\n * TODO: Remove in v8\n * @param cb An immediately executed function.\n */\nexport function errorContext(cb: () => void) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n const isRoot = !context;\n if (isRoot) {\n context = { errorThrown: false, error: null };\n }\n cb();\n if (isRoot) {\n const { errorThrown, error } = context!;\n context = null;\n if (errorThrown) {\n throw error;\n }\n }\n } else {\n // This is the general non-deprecated path for everyone that\n // isn't crazy enough to use super-gross mode (useDeprecatedSynchronousErrorHandling)\n cb();\n }\n}\n\n/**\n * Captures errors only in super-gross mode.\n * @param err the error to capture\n */\nexport function captureError(err: any) {\n if (config.useDeprecatedSynchronousErrorHandling && context) {\n context.errorThrown = true;\n context.error = err;\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { Observer, ObservableNotification } from './types';\nimport { isSubscription, Subscription } from './Subscription';\nimport { config } from './config';\nimport { 
reportUnhandledError } from './util/reportUnhandledError';\nimport { noop } from './util/noop';\nimport { nextNotification, errorNotification, COMPLETE_NOTIFICATION } from './NotificationFactories';\nimport { timeoutProvider } from './scheduler/timeoutProvider';\nimport { captureError } from './util/errorContext';\n\n/**\n * Implements the {@link Observer} interface and extends the\n * {@link Subscription} class. While the {@link Observer} is the public API for\n * consuming the values of an {@link Observable}, all Observers get converted to\n * a Subscriber, in order to provide Subscription-like capabilities such as\n * `unsubscribe`. Subscriber is a common type in RxJS, and crucial for\n * implementing operators, but it is rarely used as a public API.\n *\n * @class Subscriber\n */\nexport class Subscriber extends Subscription implements Observer {\n /**\n * A static factory for a Subscriber, given a (potentially partial) definition\n * of an Observer.\n * @param next The `next` callback of an Observer.\n * @param error The `error` callback of an\n * Observer.\n * @param complete The `complete` callback of an\n * Observer.\n * @return A Subscriber wrapping the (partially defined)\n * Observer represented by the given arguments.\n * @nocollapse\n * @deprecated Do not use. Will be removed in v8. There is no replacement for this\n * method, and there is no reason to be creating instances of `Subscriber` directly.\n * If you have a specific use case, please file an issue.\n */\n static create(next?: (x?: T) => void, error?: (e?: any) => void, complete?: () => void): Subscriber {\n return new SafeSubscriber(next, error, complete);\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n protected isStopped: boolean = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. 
*/\n protected destination: Subscriber | Observer; // this `any` is the escape hatch to erase extra type param (e.g. R)\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * There is no reason to directly create an instance of Subscriber. This type is exported for typings reasons.\n */\n constructor(destination?: Subscriber | Observer) {\n super();\n if (destination) {\n this.destination = destination;\n // Automatically chain subscriptions together here.\n // if destination is a Subscription, then it is a Subscriber.\n if (isSubscription(destination)) {\n destination.add(this);\n }\n } else {\n this.destination = EMPTY_OBSERVER;\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `next` from\n * the Observable, with a value. The Observable may call this method 0 or more\n * times.\n * @param {T} [value] The `next` value.\n * @return {void}\n */\n next(value?: T): void {\n if (this.isStopped) {\n handleStoppedNotification(nextNotification(value), this);\n } else {\n this._next(value!);\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `error` from\n * the Observable, with an attached `Error`. Notifies the Observer that\n * the Observable has experienced an error condition.\n * @param {any} [err] The `error` exception.\n * @return {void}\n */\n error(err?: any): void {\n if (this.isStopped) {\n handleStoppedNotification(errorNotification(err), this);\n } else {\n this.isStopped = true;\n this._error(err);\n }\n }\n\n /**\n * The {@link Observer} callback to receive a valueless notification of type\n * `complete` from the Observable. 
Notifies the Observer that the Observable\n * has finished sending push-based notifications.\n * @return {void}\n */\n complete(): void {\n if (this.isStopped) {\n handleStoppedNotification(COMPLETE_NOTIFICATION, this);\n } else {\n this.isStopped = true;\n this._complete();\n }\n }\n\n unsubscribe(): void {\n if (!this.closed) {\n this.isStopped = true;\n super.unsubscribe();\n this.destination = null!;\n }\n }\n\n protected _next(value: T): void {\n this.destination.next(value);\n }\n\n protected _error(err: any): void {\n try {\n this.destination.error(err);\n } finally {\n this.unsubscribe();\n }\n }\n\n protected _complete(): void {\n try {\n this.destination.complete();\n } finally {\n this.unsubscribe();\n }\n }\n}\n\n/**\n * This bind is captured here because we want to be able to have\n * compatibility with monoid libraries that tend to use a method named\n * `bind`. In particular, a library called Monio requires this.\n */\nconst _bind = Function.prototype.bind;\n\nfunction bind any>(fn: Fn, thisArg: any): Fn {\n return _bind.call(fn, thisArg);\n}\n\n/**\n * Internal optimization only, DO NOT EXPOSE.\n * @internal\n */\nclass ConsumerObserver implements Observer {\n constructor(private partialObserver: Partial>) {}\n\n next(value: T): void {\n const { partialObserver } = this;\n if (partialObserver.next) {\n try {\n partialObserver.next(value);\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n\n error(err: any): void {\n const { partialObserver } = this;\n if (partialObserver.error) {\n try {\n partialObserver.error(err);\n } catch (error) {\n handleUnhandledError(error);\n }\n } else {\n handleUnhandledError(err);\n }\n }\n\n complete(): void {\n const { partialObserver } = this;\n if (partialObserver.complete) {\n try {\n partialObserver.complete();\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n}\n\nexport class SafeSubscriber extends Subscriber {\n constructor(\n observerOrNext?: Partial> | ((value: T) => void) | 
null,\n error?: ((e?: any) => void) | null,\n complete?: (() => void) | null\n ) {\n super();\n\n let partialObserver: Partial>;\n if (isFunction(observerOrNext) || !observerOrNext) {\n // The first argument is a function, not an observer. The next\n // two arguments *could* be observers, or they could be empty.\n partialObserver = {\n next: (observerOrNext ?? undefined) as (((value: T) => void) | undefined),\n error: error ?? undefined,\n complete: complete ?? undefined,\n };\n } else {\n // The first argument is a partial observer.\n let context: any;\n if (this && config.useDeprecatedNextContext) {\n // This is a deprecated path that made `this.unsubscribe()` available in\n // next handler functions passed to subscribe. This only exists behind a flag\n // now, as it is *very* slow.\n context = Object.create(observerOrNext);\n context.unsubscribe = () => this.unsubscribe();\n partialObserver = {\n next: observerOrNext.next && bind(observerOrNext.next, context),\n error: observerOrNext.error && bind(observerOrNext.error, context),\n complete: observerOrNext.complete && bind(observerOrNext.complete, context),\n };\n } else {\n // The \"normal\" path. 
Just use the partial observer directly.\n partialObserver = observerOrNext;\n }\n }\n\n // Wrap the partial observer to ensure it's a full observer, and\n // make sure proper error handling is accounted for.\n this.destination = new ConsumerObserver(partialObserver);\n }\n}\n\nfunction handleUnhandledError(error: any) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n captureError(error);\n } else {\n // Ideal path, we report this as an unhandled error,\n // which is thrown on a new call stack.\n reportUnhandledError(error);\n }\n}\n\n/**\n * An error handler used when no error handler was supplied\n * to the SafeSubscriber -- meaning no error handler was supplied\n * do the `subscribe` call on our observable.\n * @param err The error to handle\n */\nfunction defaultErrorHandler(err: any) {\n throw err;\n}\n\n/**\n * A handler for notifications that cannot be sent to a stopped subscriber.\n * @param notification The notification being sent\n * @param subscriber The stopped subscriber\n */\nfunction handleStoppedNotification(notification: ObservableNotification, subscriber: Subscriber) {\n const { onStoppedNotification } = config;\n onStoppedNotification && timeoutProvider.setTimeout(() => onStoppedNotification(notification, subscriber));\n}\n\n/**\n * The observer used as a stub for subscriptions where the user did not\n * pass any arguments to `subscribe`. Comes with the default error handling\n * behavior.\n */\nexport const EMPTY_OBSERVER: Readonly> & { closed: true } = {\n closed: true,\n next: noop,\n error: defaultErrorHandler,\n complete: noop,\n};\n", "/**\n * Symbol.observable or a string \"@@observable\". 
Used for interop\n *\n * @deprecated We will no longer be exporting this symbol in upcoming versions of RxJS.\n * Instead polyfill and use Symbol.observable directly *or* use https://www.npmjs.com/package/symbol-observable\n */\nexport const observable: string | symbol = (() => (typeof Symbol === 'function' && Symbol.observable) || '@@observable')();\n", "/**\n * This function takes one parameter and just returns it. Simply put,\n * this is like `(x: T): T => x`.\n *\n * ## Examples\n *\n * This is useful in some cases when using things like `mergeMap`\n *\n * ```ts\n * import { interval, take, map, range, mergeMap, identity } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(5));\n *\n * const result$ = source$.pipe(\n * map(i => range(i)),\n * mergeMap(identity) // same as mergeMap(x => x)\n * );\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * Or when you want to selectively apply an operator\n *\n * ```ts\n * import { interval, take, identity } from 'rxjs';\n *\n * const shouldLimit = () => Math.random() < 0.5;\n *\n * const source$ = interval(1000);\n *\n * const result$ = source$.pipe(shouldLimit() ? 
take(5) : identity);\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * @param x Any value that is returned by this function\n * @returns The value passed as the first parameter to this function\n */\nexport function identity(x: T): T {\n return x;\n}\n", "import { identity } from './identity';\nimport { UnaryFunction } from '../types';\n\nexport function pipe(): typeof identity;\nexport function pipe(fn1: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction, fn3: UnaryFunction): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction,\n ...fns: UnaryFunction[]\n): 
UnaryFunction;\n\n/**\n * pipe() can be called on one or more functions, each of which can take one argument (\"UnaryFunction\")\n * and uses it to return a value.\n * It returns a function that takes one argument, passes it to the first UnaryFunction, and then\n * passes the result to the next one, passes that result to the next one, and so on. \n */\nexport function pipe(...fns: Array>): UnaryFunction {\n return pipeFromArray(fns);\n}\n\n/** @internal */\nexport function pipeFromArray(fns: Array>): UnaryFunction {\n if (fns.length === 0) {\n return identity as UnaryFunction;\n }\n\n if (fns.length === 1) {\n return fns[0];\n }\n\n return function piped(input: T): R {\n return fns.reduce((prev: any, fn: UnaryFunction) => fn(prev), input as any);\n };\n}\n", "import { Operator } from './Operator';\nimport { SafeSubscriber, Subscriber } from './Subscriber';\nimport { isSubscription, Subscription } from './Subscription';\nimport { TeardownLogic, OperatorFunction, Subscribable, Observer } from './types';\nimport { observable as Symbol_observable } from './symbol/observable';\nimport { pipeFromArray } from './util/pipe';\nimport { config } from './config';\nimport { isFunction } from './util/isFunction';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A representation of any set of values over any amount of time. This is the most basic building block\n * of RxJS.\n *\n * @class Observable\n */\nexport class Observable implements Subscribable {\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n source: Observable | undefined;\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n operator: Operator | undefined;\n\n /**\n * @constructor\n * @param {Function} subscribe the function that is called when the Observable is\n * initially subscribed to. 
This function is given a Subscriber, to which new values\n * can be `next`ed, or an `error` method can be called to raise an error, or\n * `complete` can be called to notify of a successful completion.\n */\n constructor(subscribe?: (this: Observable, subscriber: Subscriber) => TeardownLogic) {\n if (subscribe) {\n this._subscribe = subscribe;\n }\n }\n\n // HACK: Since TypeScript inherits static properties too, we have to\n // fight against TypeScript here so Subject can have a different static create signature\n /**\n * Creates a new Observable by calling the Observable constructor\n * @owner Observable\n * @method create\n * @param {Function} subscribe? the subscriber function to be passed to the Observable constructor\n * @return {Observable} a new observable\n * @nocollapse\n * @deprecated Use `new Observable()` instead. Will be removed in v8.\n */\n static create: (...args: any[]) => any = (subscribe?: (subscriber: Subscriber) => TeardownLogic) => {\n return new Observable(subscribe);\n };\n\n /**\n * Creates a new Observable, with this Observable instance as the source, and the passed\n * operator defined as the new observable's operator.\n * @method lift\n * @param operator the operator defining the operation to take on the observable\n * @return a new observable with the Operator applied\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * If you have implemented an operator using `lift`, it is recommended that you create an\n * operator by simply returning `new Observable()` directly. 
See \"Creating new operators from\n * scratch\" section here: https://rxjs.dev/guide/operators\n */\n lift(operator?: Operator): Observable {\n const observable = new Observable();\n observable.source = this;\n observable.operator = operator;\n return observable;\n }\n\n subscribe(observerOrNext?: Partial> | ((value: T) => void)): Subscription;\n /** @deprecated Instead of passing separate callback arguments, use an observer argument. Signatures taking separate callback arguments will be removed in v8. Details: https://rxjs.dev/deprecations/subscribe-arguments */\n subscribe(next?: ((value: T) => void) | null, error?: ((error: any) => void) | null, complete?: (() => void) | null): Subscription;\n /**\n * Invokes an execution of an Observable and registers Observer handlers for notifications it will emit.\n *\n * Use it when you have all these Observables, but still nothing is happening.\n *\n * `subscribe` is not a regular operator, but a method that calls Observable's internal `subscribe` function. It\n * might be for example a function that you passed to Observable's constructor, but most of the time it is\n * a library implementation, which defines what will be emitted by an Observable, and when it be will emitted. This means\n * that calling `subscribe` is actually the moment when Observable starts its work, not when it is created, as it is often\n * the thought.\n *\n * Apart from starting the execution of an Observable, this method allows you to listen for values\n * that an Observable emits, as well as for when it completes or errors. You can achieve this in two\n * of the following ways.\n *\n * The first way is creating an object that implements {@link Observer} interface. It should have methods\n * defined by that interface, but note that it should be just a regular JavaScript object, which you can create\n * yourself in any way you want (ES6 class, classic function constructor, object literal etc.). 
In particular, do\n * not attempt to use any RxJS implementation details to create Observers - you don't need them. Remember also\n * that your object does not have to implement all methods. If you find yourself creating a method that doesn't\n * do anything, you can simply omit it. Note however, if the `error` method is not provided and an error happens,\n * it will be thrown asynchronously. Errors thrown asynchronously cannot be caught using `try`/`catch`. Instead,\n * use the {@link onUnhandledError} configuration option or use a runtime handler (like `window.onerror` or\n * `process.on('error)`) to be notified of unhandled errors. Because of this, it's recommended that you provide\n * an `error` method to avoid missing thrown errors.\n *\n * The second way is to give up on Observer object altogether and simply provide callback functions in place of its methods.\n * This means you can provide three functions as arguments to `subscribe`, where the first function is equivalent\n * of a `next` method, the second of an `error` method and the third of a `complete` method. Just as in case of an Observer,\n * if you do not need to listen for something, you can omit a function by passing `undefined` or `null`,\n * since `subscribe` recognizes these functions by where they were placed in function call. When it comes\n * to the `error` function, as with an Observer, if not provided, errors emitted by an Observable will be thrown asynchronously.\n *\n * You can, however, subscribe with no parameters at all. This may be the case where you're not interested in terminal events\n * and you also handled emissions internally by using operators (e.g. using `tap`).\n *\n * Whichever style of calling `subscribe` you use, in both cases it returns a Subscription object.\n * This object allows you to call `unsubscribe` on it, which in turn will stop the work that an Observable does and will clean\n * up all resources that an Observable used. 
Note that cancelling a subscription will not call `complete` callback\n * provided to `subscribe` function, which is reserved for a regular completion signal that comes from an Observable.\n *\n * Remember that callbacks provided to `subscribe` are not guaranteed to be called asynchronously.\n * It is an Observable itself that decides when these functions will be called. For example {@link of}\n * by default emits all its values synchronously. Always check documentation for how given Observable\n * will behave when subscribed and if its default behavior can be modified with a `scheduler`.\n *\n * #### Examples\n *\n * Subscribe with an {@link guide/observer Observer}\n *\n * ```ts\n * import { of } from 'rxjs';\n *\n * const sumObserver = {\n * sum: 0,\n * next(value) {\n * console.log('Adding: ' + value);\n * this.sum = this.sum + value;\n * },\n * error() {\n * // We actually could just remove this method,\n * // since we do not really care about errors right now.\n * },\n * complete() {\n * console.log('Sum equals: ' + this.sum);\n * }\n * };\n *\n * of(1, 2, 3) // Synchronously emits 1, 2, 3 and then completes.\n * .subscribe(sumObserver);\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Subscribe with functions ({@link deprecations/subscribe-arguments deprecated})\n *\n * ```ts\n * import { of } from 'rxjs'\n *\n * let sum = 0;\n *\n * of(1, 2, 3).subscribe(\n * value => {\n * console.log('Adding: ' + value);\n * sum = sum + value;\n * },\n * undefined,\n * () => console.log('Sum equals: ' + sum)\n * );\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Cancel a subscription\n *\n * ```ts\n * import { interval } from 'rxjs';\n *\n * const subscription = interval(1000).subscribe({\n * next(num) {\n * console.log(num)\n * },\n * complete() {\n * // Will not be called, even when cancelling subscription.\n * console.log('completed!');\n * 
}\n * });\n *\n * setTimeout(() => {\n * subscription.unsubscribe();\n * console.log('unsubscribed!');\n * }, 2500);\n *\n * // Logs:\n * // 0 after 1s\n * // 1 after 2s\n * // 'unsubscribed!' after 2.5s\n * ```\n *\n * @param {Observer|Function} observerOrNext (optional) Either an observer with methods to be called,\n * or the first of three possible handlers, which is the handler for each value emitted from the subscribed\n * Observable.\n * @param {Function} error (optional) A handler for a terminal event resulting from an error. If no error handler is provided,\n * the error will be thrown asynchronously as unhandled.\n * @param {Function} complete (optional) A handler for a terminal event resulting from successful completion.\n * @return {Subscription} a subscription reference to the registered handlers\n * @method subscribe\n */\n subscribe(\n observerOrNext?: Partial> | ((value: T) => void) | null,\n error?: ((error: any) => void) | null,\n complete?: (() => void) | null\n ): Subscription {\n const subscriber = isSubscriber(observerOrNext) ? observerOrNext : new SafeSubscriber(observerOrNext, error, complete);\n\n errorContext(() => {\n const { operator, source } = this;\n subscriber.add(\n operator\n ? // We're dealing with a subscription in the\n // operator chain to one of our lifted operators.\n operator.call(subscriber, source)\n : source\n ? // If `source` has a value, but `operator` does not, something that\n // had intimate knowledge of our API, like our `Subject`, must have\n // set it. 
We're going to just call `_subscribe` directly.\n this._subscribe(subscriber)\n : // In all other cases, we're likely wrapping a user-provided initializer\n // function, so we need to catch errors and handle them appropriately.\n this._trySubscribe(subscriber)\n );\n });\n\n return subscriber;\n }\n\n /** @internal */\n protected _trySubscribe(sink: Subscriber): TeardownLogic {\n try {\n return this._subscribe(sink);\n } catch (err) {\n // We don't need to return anything in this case,\n // because it's just going to try to `add()` to a subscription\n // above.\n sink.error(err);\n }\n }\n\n /**\n * Used as a NON-CANCELLABLE means of subscribing to an observable, for use with\n * APIs that expect promises, like `async/await`. You cannot unsubscribe from this.\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. 
To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * #### Example\n *\n * ```ts\n * import { interval, take } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(4));\n *\n * async function getTotal() {\n * let total = 0;\n *\n * await source$.forEach(value => {\n * total += value;\n * console.log('observable -> ' + value);\n * });\n *\n * return total;\n * }\n *\n * getTotal().then(\n * total => console.log('Total: ' + total)\n * );\n *\n * // Expected:\n * // 'observable -> 0'\n * // 'observable -> 1'\n * // 'observable -> 2'\n * // 'observable -> 3'\n * // 'Total: 6'\n * ```\n *\n * @param next a handler for each value emitted by the observable\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n */\n forEach(next: (value: T) => void): Promise;\n\n /**\n * @param next a handler for each value emitted by the observable\n * @param promiseCtor a constructor function used to instantiate the Promise\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n * @deprecated Passing a Promise constructor will no longer be available\n * in upcoming versions of RxJS. This is because it adds weight to the library, for very\n * little benefit. If you need this functionality, it is recommended that you either\n * polyfill Promise, or you create an adapter to convert the returned native promise\n * to whatever promise implementation you wanted. 
Will be removed in v8.\n */\n forEach(next: (value: T) => void, promiseCtor: PromiseConstructorLike): Promise;\n\n forEach(next: (value: T) => void, promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n const subscriber = new SafeSubscriber({\n next: (value) => {\n try {\n next(value);\n } catch (err) {\n reject(err);\n subscriber.unsubscribe();\n }\n },\n error: reject,\n complete: resolve,\n });\n this.subscribe(subscriber);\n }) as Promise;\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): TeardownLogic {\n return this.source?.subscribe(subscriber);\n }\n\n /**\n * An interop point defined by the es7-observable spec https://github.com/zenparsing/es-observable\n * @method Symbol.observable\n * @return {Observable} this instance of the observable\n */\n [Symbol_observable]() {\n return this;\n }\n\n /* tslint:disable:max-line-length */\n pipe(): Observable;\n pipe(op1: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction, op3: OperatorFunction): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: 
OperatorFunction,\n op8: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction,\n ...operations: OperatorFunction[]\n ): Observable;\n /* tslint:enable:max-line-length */\n\n /**\n * Used to stitch together functional operators into a chain.\n * @method pipe\n * @return {Observable} the Observable result of all of the operators having\n * been called in the order they were passed in.\n *\n * ## Example\n *\n * ```ts\n * import { interval, filter, map, scan } from 'rxjs';\n *\n * interval(1000)\n * .pipe(\n * filter(x => x % 2 === 0),\n * map(x => x + x),\n * scan((acc, x) => acc + x)\n * )\n * .subscribe(x => console.log(x));\n * ```\n */\n pipe(...operations: OperatorFunction[]): Observable {\n return pipeFromArray(operations)(this);\n }\n\n /* tslint:disable:max-line-length */\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: typeof Promise): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. 
Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: PromiseConstructorLike): Promise;\n /* tslint:enable:max-line-length */\n\n /**\n * Subscribe to this Observable and get a Promise resolving on\n * `complete` with the last emission (if any).\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * @method toPromise\n * @param [promiseCtor] a constructor function used to instantiate\n * the Promise\n * @return A Promise that resolves with the last value emit, or\n * rejects on an error. If there were no emissions, Promise\n * resolves with undefined.\n * @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise\n */\n toPromise(promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n let value: T | undefined;\n this.subscribe(\n (x: T) => (value = x),\n (err: any) => reject(err),\n () => resolve(value)\n );\n }) as Promise;\n }\n}\n\n/**\n * Decides between a passed promise constructor from consuming code,\n * A default configured promise constructor, and the native promise\n * constructor and returns it. If nothing can be found, it will throw\n * an error.\n * @param promiseCtor The optional promise constructor to passed by consuming code\n */\nfunction getPromiseCtor(promiseCtor: PromiseConstructorLike | undefined) {\n return promiseCtor ?? config.Promise ?? 
Promise;\n}\n\nfunction isObserver(value: any): value is Observer {\n return value && isFunction(value.next) && isFunction(value.error) && isFunction(value.complete);\n}\n\nfunction isSubscriber(value: any): value is Subscriber {\n return (value && value instanceof Subscriber) || (isObserver(value) && isSubscription(value));\n}\n", "import { Observable } from '../Observable';\nimport { Subscriber } from '../Subscriber';\nimport { OperatorFunction } from '../types';\nimport { isFunction } from './isFunction';\n\n/**\n * Used to determine if an object is an Observable with a lift function.\n */\nexport function hasLift(source: any): source is { lift: InstanceType['lift'] } {\n return isFunction(source?.lift);\n}\n\n/**\n * Creates an `OperatorFunction`. Used to define operators throughout the library in a concise way.\n * @param init The logic to connect the liftedSource to the subscriber at the moment of subscription.\n */\nexport function operate(\n init: (liftedSource: Observable, subscriber: Subscriber) => (() => void) | void\n): OperatorFunction {\n return (source: Observable) => {\n if (hasLift(source)) {\n return source.lift(function (this: Subscriber, liftedSource: Observable) {\n try {\n return init(liftedSource, this);\n } catch (err) {\n this.error(err);\n }\n });\n }\n throw new TypeError('Unable to lift unknown Observable type');\n };\n}\n", "import { Subscriber } from '../Subscriber';\n\n/**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. 
Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional teardown logic here. This will only be called on teardown if the\n * subscriber itself is not already closed. This is called after all other teardown logic is executed.\n */\nexport function createOperatorSubscriber(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n onFinalize?: () => void\n): Subscriber {\n return new OperatorSubscriber(destination, onNext, onComplete, onError, onFinalize);\n}\n\n/**\n * A generic helper for allowing operators to be created with a Subscriber and\n * use closures to capture necessary state from the operator function itself.\n */\nexport class OperatorSubscriber extends Subscriber {\n /**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional finalization logic here. This will only be called on finalization if the\n * subscriber itself is not already closed. 
This is called after all other finalization logic is executed.\n * @param shouldUnsubscribe An optional check to see if an unsubscribe call should truly unsubscribe.\n * NOTE: This currently **ONLY** exists to support the strange behavior of {@link groupBy}, where unsubscription\n * to the resulting observable does not actually disconnect from the source if there are active subscriptions\n * to any grouped observable. (DO NOT EXPOSE OR USE EXTERNALLY!!!)\n */\n constructor(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n private onFinalize?: () => void,\n private shouldUnsubscribe?: () => boolean\n ) {\n // It's important - for performance reasons - that all of this class's\n // members are initialized and that they are always initialized in the same\n // order. This will ensure that all OperatorSubscriber instances have the\n // same hidden class in V8. This, in turn, will help keep the number of\n // hidden classes involved in property accesses within the base class as\n // low as possible. If the number of hidden classes involved exceeds four,\n // the property accesses will become megamorphic and performance penalties\n // will be incurred - i.e. inline caches won't be used.\n //\n // The reasons for ensuring all instances have the same hidden class are\n // further discussed in this blog post from Benedikt Meurer:\n // https://benediktmeurer.de/2018/03/23/impact-of-polymorphism-on-component-based-frameworks-like-react/\n super(destination);\n this._next = onNext\n ? function (this: OperatorSubscriber, value: T) {\n try {\n onNext(value);\n } catch (err) {\n destination.error(err);\n }\n }\n : super._next;\n this._error = onError\n ? 
function (this: OperatorSubscriber, err: any) {\n try {\n onError(err);\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._error;\n this._complete = onComplete\n ? function (this: OperatorSubscriber) {\n try {\n onComplete();\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._complete;\n }\n\n unsubscribe() {\n if (!this.shouldUnsubscribe || this.shouldUnsubscribe()) {\n const { closed } = this;\n super.unsubscribe();\n // Execute additional teardown if we have any and we didn't already do so.\n !closed && this.onFinalize?.();\n }\n }\n}\n", "import { Subscription } from '../Subscription';\n\ninterface AnimationFrameProvider {\n schedule(callback: FrameRequestCallback): Subscription;\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n delegate:\n | {\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n }\n | undefined;\n}\n\nexport const animationFrameProvider: AnimationFrameProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n schedule(callback) {\n let request = requestAnimationFrame;\n let cancel: typeof cancelAnimationFrame | undefined = cancelAnimationFrame;\n const { delegate } = animationFrameProvider;\n if (delegate) {\n request = delegate.requestAnimationFrame;\n cancel = delegate.cancelAnimationFrame;\n }\n const handle = request((timestamp) => {\n // Clear the cancel function. 
The request has been fulfilled, so\n // attempting to cancel the request upon unsubscription would be\n // pointless.\n cancel = undefined;\n callback(timestamp);\n });\n return new Subscription(() => cancel?.(handle));\n },\n requestAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.requestAnimationFrame || requestAnimationFrame)(...args);\n },\n cancelAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.cancelAnimationFrame || cancelAnimationFrame)(...args);\n },\n delegate: undefined,\n};\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface ObjectUnsubscribedError extends Error {}\n\nexport interface ObjectUnsubscribedErrorCtor {\n /**\n * @deprecated Internal implementation detail. Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (): ObjectUnsubscribedError;\n}\n\n/**\n * An error thrown when an action is invalid because the object has been\n * unsubscribed.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n *\n * @class ObjectUnsubscribedError\n */\nexport const ObjectUnsubscribedError: ObjectUnsubscribedErrorCtor = createErrorClass(\n (_super) =>\n function ObjectUnsubscribedErrorImpl(this: any) {\n _super(this);\n this.name = 'ObjectUnsubscribedError';\n this.message = 'object unsubscribed';\n }\n);\n", "import { Operator } from './Operator';\nimport { Observable } from './Observable';\nimport { Subscriber } from './Subscriber';\nimport { Subscription, EMPTY_SUBSCRIPTION } from './Subscription';\nimport { Observer, SubscriptionLike, TeardownLogic } from './types';\nimport { ObjectUnsubscribedError } from './util/ObjectUnsubscribedError';\nimport { arrRemove } from './util/arrRemove';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A Subject is a special type of Observable that allows values to be\n * multicasted to many Observers. 
Subjects are like EventEmitters.\n *\n * Every Subject is an Observable and an Observer. You can subscribe to a\n * Subject, and you can call next to feed values as well as error and complete.\n */\nexport class Subject extends Observable implements SubscriptionLike {\n closed = false;\n\n private currentObservers: Observer[] | null = null;\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n observers: Observer[] = [];\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n isStopped = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n hasError = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n thrownError: any = null;\n\n /**\n * Creates a \"subject\" by basically gluing an observer to an observable.\n *\n * @nocollapse\n * @deprecated Recommended you do not use. Will be removed at some point in the future. Plans for replacement still under discussion.\n */\n static create: (...args: any[]) => any = (destination: Observer, source: Observable): AnonymousSubject => {\n return new AnonymousSubject(destination, source);\n };\n\n constructor() {\n // NOTE: This must be here to obscure Observable's constructor.\n super();\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. 
*/\n lift(operator: Operator): Observable {\n const subject = new AnonymousSubject(this, this);\n subject.operator = operator as any;\n return subject as any;\n }\n\n /** @internal */\n protected _throwIfClosed() {\n if (this.closed) {\n throw new ObjectUnsubscribedError();\n }\n }\n\n next(value: T) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n if (!this.currentObservers) {\n this.currentObservers = Array.from(this.observers);\n }\n for (const observer of this.currentObservers) {\n observer.next(value);\n }\n }\n });\n }\n\n error(err: any) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.hasError = this.isStopped = true;\n this.thrownError = err;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.error(err);\n }\n }\n });\n }\n\n complete() {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.isStopped = true;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.complete();\n }\n }\n });\n }\n\n unsubscribe() {\n this.isStopped = this.closed = true;\n this.observers = this.currentObservers = null!;\n }\n\n get observed() {\n return this.observers?.length > 0;\n }\n\n /** @internal */\n protected _trySubscribe(subscriber: Subscriber): TeardownLogic {\n this._throwIfClosed();\n return super._trySubscribe(subscriber);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._checkFinalizedStatuses(subscriber);\n return this._innerSubscribe(subscriber);\n }\n\n /** @internal */\n protected _innerSubscribe(subscriber: Subscriber) {\n const { hasError, isStopped, observers } = this;\n if (hasError || isStopped) {\n return EMPTY_SUBSCRIPTION;\n }\n this.currentObservers = null;\n observers.push(subscriber);\n return new Subscription(() => {\n this.currentObservers = null;\n arrRemove(observers, subscriber);\n });\n }\n\n /** @internal */\n protected 
_checkFinalizedStatuses(subscriber: Subscriber) {\n const { hasError, thrownError, isStopped } = this;\n if (hasError) {\n subscriber.error(thrownError);\n } else if (isStopped) {\n subscriber.complete();\n }\n }\n\n /**\n * Creates a new Observable with this Subject as the source. You can do this\n * to create custom Observer-side logic of the Subject and conceal it from\n * code that uses the Observable.\n * @return {Observable} Observable that the Subject casts to\n */\n asObservable(): Observable {\n const observable: any = new Observable();\n observable.source = this;\n return observable;\n }\n}\n\n/**\n * @class AnonymousSubject\n */\nexport class AnonymousSubject extends Subject {\n constructor(\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n public destination?: Observer,\n source?: Observable\n ) {\n super();\n this.source = source;\n }\n\n next(value: T) {\n this.destination?.next?.(value);\n }\n\n error(err: any) {\n this.destination?.error?.(err);\n }\n\n complete() {\n this.destination?.complete?.();\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n return this.source?.subscribe(subscriber) ?? 
EMPTY_SUBSCRIPTION;\n }\n}\n", "import { TimestampProvider } from '../types';\n\ninterface DateTimestampProvider extends TimestampProvider {\n delegate: TimestampProvider | undefined;\n}\n\nexport const dateTimestampProvider: DateTimestampProvider = {\n now() {\n // Use the variable rather than `this` so that the function can be called\n // without being bound to the provider.\n return (dateTimestampProvider.delegate || Date).now();\n },\n delegate: undefined,\n};\n", "import { Subject } from './Subject';\nimport { TimestampProvider } from './types';\nimport { Subscriber } from './Subscriber';\nimport { Subscription } from './Subscription';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * A variant of {@link Subject} that \"replays\" old values to new subscribers by emitting them when they first subscribe.\n *\n * `ReplaySubject` has an internal buffer that will store a specified number of values that it has observed. Like `Subject`,\n * `ReplaySubject` \"observes\" values by having them passed to its `next` method. When it observes a value, it will store that\n * value for a time determined by the configuration of the `ReplaySubject`, as passed to its constructor.\n *\n * When a new subscriber subscribes to the `ReplaySubject` instance, it will synchronously emit all values in its buffer in\n * a First-In-First-Out (FIFO) manner. The `ReplaySubject` will also complete, if it has observed completion; and it will\n * error if it has observed an error.\n *\n * There are two main configuration items to be concerned with:\n *\n * 1. `bufferSize` - This will determine how many items are stored in the buffer, defaults to infinite.\n * 2. `windowTime` - The amount of time to hold a value in the buffer before removing it from the buffer.\n *\n * Both configurations may exist simultaneously. 
So if you would like to buffer a maximum of 3 values, as long as the values\n * are less than 2 seconds old, you could do so with a `new ReplaySubject(3, 2000)`.\n *\n * ### Differences with BehaviorSubject\n *\n * `BehaviorSubject` is similar to `new ReplaySubject(1)`, with a couple of exceptions:\n *\n * 1. `BehaviorSubject` comes \"primed\" with a single value upon construction.\n * 2. `ReplaySubject` will replay values, even after observing an error, where `BehaviorSubject` will not.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n * @see {@link shareReplay}\n */\nexport class ReplaySubject extends Subject {\n private _buffer: (T | number)[] = [];\n private _infiniteTimeWindow = true;\n\n /**\n * @param bufferSize The size of the buffer to replay on subscription\n * @param windowTime The amount of time the buffered items will stay buffered\n * @param timestampProvider An object with a `now()` method that provides the current timestamp. This is used to\n * calculate the amount of time something has been buffered.\n */\n constructor(\n private _bufferSize = Infinity,\n private _windowTime = Infinity,\n private _timestampProvider: TimestampProvider = dateTimestampProvider\n ) {\n super();\n this._infiniteTimeWindow = _windowTime === Infinity;\n this._bufferSize = Math.max(1, _bufferSize);\n this._windowTime = Math.max(1, _windowTime);\n }\n\n next(value: T): void {\n const { isStopped, _buffer, _infiniteTimeWindow, _timestampProvider, _windowTime } = this;\n if (!isStopped) {\n _buffer.push(value);\n !_infiniteTimeWindow && _buffer.push(_timestampProvider.now() + _windowTime);\n }\n this._trimBuffer();\n super.next(value);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._trimBuffer();\n\n const subscription = this._innerSubscribe(subscriber);\n\n const { _infiniteTimeWindow, _buffer } = this;\n // We use a copy here, so reentrant code does not mutate our array while we're\n // 
emitting it to a new subscriber.\n const copy = _buffer.slice();\n for (let i = 0; i < copy.length && !subscriber.closed; i += _infiniteTimeWindow ? 1 : 2) {\n subscriber.next(copy[i] as T);\n }\n\n this._checkFinalizedStatuses(subscriber);\n\n return subscription;\n }\n\n private _trimBuffer() {\n const { _bufferSize, _timestampProvider, _buffer, _infiniteTimeWindow } = this;\n // If we don't have an infinite buffer size, and we're over the length,\n // use splice to truncate the old buffer values off. Note that we have to\n // double the size for instances where we're not using an infinite time window\n // because we're storing the values and the timestamps in the same array.\n const adjustedBufferSize = (_infiniteTimeWindow ? 1 : 2) * _bufferSize;\n _bufferSize < Infinity && adjustedBufferSize < _buffer.length && _buffer.splice(0, _buffer.length - adjustedBufferSize);\n\n // Now, if we're not in an infinite time window, remove all values where the time is\n // older than what is allowed.\n if (!_infiniteTimeWindow) {\n const now = _timestampProvider.now();\n let last = 0;\n // Search the array for the first timestamp that isn't expired and\n // truncate the buffer up to that point.\n for (let i = 1; i < _buffer.length && (_buffer[i] as number) <= now; i += 2) {\n last = i;\n }\n last && _buffer.splice(0, last + 1);\n }\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Subscription } from '../Subscription';\nimport { SchedulerAction } from '../types';\n\n/**\n * A unit of work to be executed in a `scheduler`. 
An action is typically\n * created from within a {@link SchedulerLike} and an RxJS user does not need to concern\n * themselves about creating and manipulating an Action.\n *\n * ```ts\n * class Action extends Subscription {\n * new (scheduler: Scheduler, work: (state?: T) => void);\n * schedule(state?: T, delay: number = 0): Subscription;\n * }\n * ```\n *\n * @class Action\n */\nexport class Action extends Subscription {\n constructor(scheduler: Scheduler, work: (this: SchedulerAction, state?: T) => void) {\n super();\n }\n /**\n * Schedules this action on its parent {@link SchedulerLike} for execution. May be passed\n * some context object, `state`. May happen at some point in the future,\n * according to the `delay` parameter, if specified.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler.\n * @return {void}\n */\n public schedule(state?: T, delay: number = 0): Subscription {\n return this;\n }\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetIntervalFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearIntervalFunction = (handle: TimerHandle) => void;\n\ninterface IntervalProvider {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n delegate:\n | {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n }\n | undefined;\n}\n\nexport const intervalProvider: IntervalProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setInterval(handler: () => void, timeout?: number, ...args) {\n const { delegate } = intervalProvider;\n if (delegate?.setInterval) {\n return delegate.setInterval(handler, timeout, ...args);\n }\n return setInterval(handler, timeout, ...args);\n 
},\n clearInterval(handle) {\n const { delegate } = intervalProvider;\n return (delegate?.clearInterval || clearInterval)(handle as any);\n },\n delegate: undefined,\n};\n", "import { Action } from './Action';\nimport { SchedulerAction } from '../types';\nimport { Subscription } from '../Subscription';\nimport { AsyncScheduler } from './AsyncScheduler';\nimport { intervalProvider } from './intervalProvider';\nimport { arrRemove } from '../util/arrRemove';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncAction extends Action {\n public id: TimerHandle | undefined;\n public state?: T;\n // @ts-ignore: Property has no initializer and is not definitely assigned\n public delay: number;\n protected pending: boolean = false;\n\n constructor(protected scheduler: AsyncScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n public schedule(state?: T, delay: number = 0): Subscription {\n if (this.closed) {\n return this;\n }\n\n // Always replace the current state with the new state.\n this.state = state;\n\n const id = this.id;\n const scheduler = this.scheduler;\n\n //\n // Important implementation note:\n //\n // Actions only execute once by default, unless rescheduled from within the\n // scheduled callback. This allows us to implement single and repeat\n // actions via the same code path, without adding API surface area, as well\n // as mimic traditional recursion but across asynchronous boundaries.\n //\n // However, JS runtimes and timers distinguish between intervals achieved by\n // serial `setTimeout` calls vs. a single `setInterval` call. An interval of\n // serial `setTimeout` calls can be individually delayed, which delays\n // scheduling the next `setTimeout`, and so on. 
`setInterval` attempts to\n // guarantee the interval callback will be invoked more precisely to the\n // interval period, regardless of load.\n //\n // Therefore, we use `setInterval` to schedule single and repeat actions.\n // If the action reschedules itself with the same delay, the interval is not\n // canceled. If the action doesn't reschedule, or reschedules with a\n // different delay, the interval will be canceled after scheduled callback\n // execution.\n //\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, delay);\n }\n\n // Set the pending flag indicating that this action has been scheduled, or\n // has recursively rescheduled itself.\n this.pending = true;\n\n this.delay = delay;\n // If this action has already an async Id, don't request a new one.\n this.id = this.id ?? this.requestAsyncId(scheduler, this.id, delay);\n\n return this;\n }\n\n protected requestAsyncId(scheduler: AsyncScheduler, _id?: TimerHandle, delay: number = 0): TimerHandle {\n return intervalProvider.setInterval(scheduler.flush.bind(scheduler, this), delay);\n }\n\n protected recycleAsyncId(_scheduler: AsyncScheduler, id?: TimerHandle, delay: number | null = 0): TimerHandle | undefined {\n // If this action is rescheduled with the same delay time, don't clear the interval id.\n if (delay != null && this.delay === delay && this.pending === false) {\n return id;\n }\n // Otherwise, if the action's delay time is different from the current delay,\n // or the action has been rescheduled before it's executed, clear the interval id\n if (id != null) {\n intervalProvider.clearInterval(id);\n }\n\n return undefined;\n }\n\n /**\n * Immediately executes this action and the `work` it contains.\n * @return {any}\n */\n public execute(state: T, delay: number): any {\n if (this.closed) {\n return new Error('executing a cancelled action');\n }\n\n this.pending = false;\n const error = this._execute(state, delay);\n if (error) {\n return error;\n } else if (this.pending === false 
&& this.id != null) {\n // Dequeue if the action didn't reschedule itself. Don't call\n // unsubscribe(), because the action could reschedule later.\n // For example:\n // ```\n // scheduler.schedule(function doWork(counter) {\n // /* ... I'm a busy worker bee ... */\n // var originalAction = this;\n // /* wait 100ms before rescheduling the action */\n // setTimeout(function () {\n // originalAction.schedule(counter + 1);\n // }, 100);\n // }, 1000);\n // ```\n this.id = this.recycleAsyncId(this.scheduler, this.id, null);\n }\n }\n\n protected _execute(state: T, _delay: number): any {\n let errored: boolean = false;\n let errorValue: any;\n try {\n this.work(state);\n } catch (e) {\n errored = true;\n // HACK: Since code elsewhere is relying on the \"truthiness\" of the\n // return here, we can't have it return \"\" or 0 or false.\n // TODO: Clean this up when we refactor schedulers mid-version-8 or so.\n errorValue = e ? e : new Error('Scheduled action threw falsy error');\n }\n if (errored) {\n this.unsubscribe();\n return errorValue;\n }\n }\n\n unsubscribe() {\n if (!this.closed) {\n const { id, scheduler } = this;\n const { actions } = scheduler;\n\n this.work = this.state = this.scheduler = null!;\n this.pending = false;\n\n arrRemove(actions, this);\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, null);\n }\n\n this.delay = null!;\n super.unsubscribe();\n }\n }\n}\n", "import { Action } from './scheduler/Action';\nimport { Subscription } from './Subscription';\nimport { SchedulerLike, SchedulerAction } from './types';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * An execution context and a data structure to order tasks and schedule their\n * execution. 
Provides a notion of (potentially virtual) time, through the\n * `now()` getter method.\n *\n * Each unit of work in a Scheduler is called an `Action`.\n *\n * ```ts\n * class Scheduler {\n * now(): number;\n * schedule(work, delay?, state?): Subscription;\n * }\n * ```\n *\n * @class Scheduler\n * @deprecated Scheduler is an internal implementation detail of RxJS, and\n * should not be used directly. Rather, create your own class and implement\n * {@link SchedulerLike}. Will be made internal in v8.\n */\nexport class Scheduler implements SchedulerLike {\n public static now: () => number = dateTimestampProvider.now;\n\n constructor(private schedulerActionCtor: typeof Action, now: () => number = Scheduler.now) {\n this.now = now;\n }\n\n /**\n * A getter method that returns a number representing the current time\n * (at the time this function was called) according to the scheduler's own\n * internal clock.\n * @return {number} A number that represents the current time. May or may not\n * have a relation to wall-clock time. May or may not refer to a time unit\n * (e.g. milliseconds).\n */\n public now: () => number;\n\n /**\n * Schedules a function, `work`, for execution. May happen at some point in\n * the future, according to the `delay` parameter, if specified. 
May be passed\n * some context object, `state`, which will be passed to the `work` function.\n *\n * The given arguments will be processed an stored as an Action object in a\n * queue of actions.\n *\n * @param {function(state: ?T): ?Subscription} work A function representing a\n * task, or some unit of work to be executed by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler itself.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @return {Subscription} A subscription in order to be able to unsubscribe\n * the scheduled work.\n */\n public schedule(work: (this: SchedulerAction, state?: T) => void, delay: number = 0, state?: T): Subscription {\n return new this.schedulerActionCtor(this, work).schedule(state, delay);\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Action } from './Action';\nimport { AsyncAction } from './AsyncAction';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncScheduler extends Scheduler {\n public actions: Array> = [];\n /**\n * A flag to indicate whether the Scheduler is currently executing a batch of\n * queued actions.\n * @type {boolean}\n * @internal\n */\n public _active: boolean = false;\n /**\n * An internal ID used to track the latest asynchronous task such as those\n * coming from `setTimeout`, `setInterval`, `requestAnimationFrame`, and\n * others.\n * @type {any}\n * @internal\n */\n public _scheduled: TimerHandle | undefined;\n\n constructor(SchedulerAction: typeof Action, now: () => number = Scheduler.now) {\n super(SchedulerAction, now);\n }\n\n public flush(action: AsyncAction): void {\n const { actions } = this;\n\n if (this._active) {\n actions.push(action);\n return;\n }\n\n let error: any;\n this._active = true;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = 
actions.shift()!)); // exhaust the scheduler queue\n\n this._active = false;\n\n if (error) {\n while ((action = actions.shift()!)) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\n/**\n *\n * Async Scheduler\n *\n * Schedule task as if you used setTimeout(task, duration)\n *\n * `async` scheduler schedules tasks asynchronously, by putting them on the JavaScript\n * event loop queue. It is best used to delay tasks in time or to schedule tasks repeating\n * in intervals.\n *\n * If you just want to \"defer\" task, that is to perform it right after currently\n * executing synchronous code ends (commonly achieved by `setTimeout(deferredTask, 0)`),\n * better choice will be the {@link asapScheduler} scheduler.\n *\n * ## Examples\n * Use async scheduler to delay task\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * const task = () => console.log('it works!');\n *\n * asyncScheduler.schedule(task, 2000);\n *\n * // After 2 seconds logs:\n * // \"it works!\"\n * ```\n *\n * Use async scheduler to repeat task in intervals\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * function task(state) {\n * console.log(state);\n * this.schedule(state + 1, 1000); // `this` references currently executing Action,\n * // which we reschedule with new state and delay\n * }\n *\n * asyncScheduler.schedule(task, 3000, 0);\n *\n * // Logs:\n * // 0 after 3s\n * // 1 after 4s\n * // 2 after 5s\n * // 3 after 6s\n * ```\n */\n\nexport const asyncScheduler = new AsyncScheduler(AsyncAction);\n\n/**\n * @deprecated Renamed to {@link asyncScheduler}. 
Will be removed in v8.\n */\nexport const async = asyncScheduler;\n", "import { AsyncAction } from './AsyncAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\nimport { SchedulerAction } from '../types';\nimport { animationFrameProvider } from './animationFrameProvider';\nimport { TimerHandle } from './timerHandle';\n\nexport class AnimationFrameAction extends AsyncAction {\n constructor(protected scheduler: AnimationFrameScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n protected requestAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle {\n // If delay is greater than 0, request as an async action.\n if (delay !== null && delay > 0) {\n return super.requestAsyncId(scheduler, id, delay);\n }\n // Push the action to the end of the scheduler queue.\n scheduler.actions.push(this);\n // If an animation frame has already been requested, don't request another\n // one. If an animation frame hasn't been requested yet, request one. Return\n // the current animation frame request id.\n return scheduler._scheduled || (scheduler._scheduled = animationFrameProvider.requestAnimationFrame(() => scheduler.flush(undefined)));\n }\n\n protected recycleAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle | undefined {\n // If delay exists and is greater than 0, or if the delay is null (the\n // action wasn't rescheduled) but was originally scheduled as an async\n // action, then recycle as an async action.\n if (delay != null ? 
delay > 0 : this.delay > 0) {\n return super.recycleAsyncId(scheduler, id, delay);\n }\n // If the scheduler queue has no remaining actions with the same async id,\n // cancel the requested animation frame and set the scheduled flag to\n // undefined so the next AnimationFrameAction will request its own.\n const { actions } = scheduler;\n if (id != null && actions[actions.length - 1]?.id !== id) {\n animationFrameProvider.cancelAnimationFrame(id as number);\n scheduler._scheduled = undefined;\n }\n // Return undefined so the action knows to request a new async id if it's rescheduled.\n return undefined;\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\nexport class AnimationFrameScheduler extends AsyncScheduler {\n public flush(action?: AsyncAction): void {\n this._active = true;\n // The async id that effects a call to flush is stored in _scheduled.\n // Before executing an action, it's necessary to check the action's async\n // id to determine whether it's supposed to be executed in the current\n // flush.\n // Previous implementations of this method used a count to determine this,\n // but that was unsound, as actions that are unsubscribed - i.e. 
cancelled -\n // are removed from the actions array and that can shift actions that are\n // scheduled to be executed in a subsequent flush into positions at which\n // they are executed within the current flush.\n const flushId = this._scheduled;\n this._scheduled = undefined;\n\n const { actions } = this;\n let error: any;\n action = action || actions.shift()!;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = actions[0]) && action.id === flushId && actions.shift());\n\n this._active = false;\n\n if (error) {\n while ((action = actions[0]) && action.id === flushId && actions.shift()) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AnimationFrameAction } from './AnimationFrameAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\n\n/**\n *\n * Animation Frame Scheduler\n *\n * Perform task when `window.requestAnimationFrame` would fire\n *\n * When `animationFrame` scheduler is used with delay, it will fall back to {@link asyncScheduler} scheduler\n * behaviour.\n *\n * Without delay, `animationFrame` scheduler can be used to create smooth browser animations.\n * It makes sure scheduled task will happen just before next browser content repaint,\n * thus performing animations as efficiently as possible.\n *\n * ## Example\n * Schedule div height animation\n * ```ts\n * // html:
\n * import { animationFrameScheduler } from 'rxjs';\n *\n * const div = document.querySelector('div');\n *\n * animationFrameScheduler.schedule(function(height) {\n * div.style.height = height + \"px\";\n *\n * this.schedule(height + 1); // `this` references currently executing Action,\n * // which we reschedule with new state\n * }, 0, 0);\n *\n * // You will see a div element growing in height\n * ```\n */\n\nexport const animationFrameScheduler = new AnimationFrameScheduler(AnimationFrameAction);\n\n/**\n * @deprecated Renamed to {@link animationFrameScheduler}. Will be removed in v8.\n */\nexport const animationFrame = animationFrameScheduler;\n", "import { Observable } from '../Observable';\nimport { SchedulerLike } from '../types';\n\n/**\n * A simple Observable that emits no items to the Observer and immediately\n * emits a complete notification.\n *\n * Just emits 'complete', and nothing else.\n *\n * ![](empty.png)\n *\n * A simple Observable that only emits the complete notification. It can be used\n * for composing with other Observables, such as in a {@link mergeMap}.\n *\n * ## Examples\n *\n * Log complete notification\n *\n * ```ts\n * import { EMPTY } from 'rxjs';\n *\n * EMPTY.subscribe({\n * next: () => console.log('Next'),\n * complete: () => console.log('Complete!')\n * });\n *\n * // Outputs\n * // Complete!\n * ```\n *\n * Emit the number 7, then complete\n *\n * ```ts\n * import { EMPTY, startWith } from 'rxjs';\n *\n * const result = EMPTY.pipe(startWith(7));\n * result.subscribe(x => console.log(x));\n *\n * // Outputs\n * // 7\n * ```\n *\n * Map and flatten only odd numbers to the sequence `'a'`, `'b'`, `'c'`\n *\n * ```ts\n * import { interval, mergeMap, of, EMPTY } from 'rxjs';\n *\n * const interval$ = interval(1000);\n * const result = interval$.pipe(\n * mergeMap(x => x % 2 === 1 ? 
of('a', 'b', 'c') : EMPTY),\n * );\n * result.subscribe(x => console.log(x));\n *\n * // Results in the following to the console:\n * // x is equal to the count on the interval, e.g. (0, 1, 2, 3, ...)\n * // x will occur every 1000ms\n * // if x % 2 is equal to 1, print a, b, c (each on its own)\n * // if x % 2 is not equal to 1, nothing will be output\n * ```\n *\n * @see {@link Observable}\n * @see {@link NEVER}\n * @see {@link of}\n * @see {@link throwError}\n */\nexport const EMPTY = new Observable((subscriber) => subscriber.complete());\n\n/**\n * @param scheduler A {@link SchedulerLike} to use for scheduling\n * the emission of the complete notification.\n * @deprecated Replaced with the {@link EMPTY} constant or {@link scheduled} (e.g. `scheduled([], scheduler)`). Will be removed in v8.\n */\nexport function empty(scheduler?: SchedulerLike) {\n return scheduler ? emptyScheduled(scheduler) : EMPTY;\n}\n\nfunction emptyScheduled(scheduler: SchedulerLike) {\n return new Observable((subscriber) => scheduler.schedule(() => subscriber.complete()));\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport function isScheduler(value: any): value is SchedulerLike {\n return value && isFunction(value.schedule);\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\nimport { isScheduler } from './isScheduler';\n\nfunction last(arr: T[]): T | undefined {\n return arr[arr.length - 1];\n}\n\nexport function popResultSelector(args: any[]): ((...args: unknown[]) => unknown) | undefined {\n return isFunction(last(args)) ? args.pop() : undefined;\n}\n\nexport function popScheduler(args: any[]): SchedulerLike | undefined {\n return isScheduler(last(args)) ? args.pop() : undefined;\n}\n\nexport function popNumber(args: any[], defaultValue: number): number {\n return typeof last(args) === 'number' ? args.pop()! 
: defaultValue;\n}\n", "export const isArrayLike = ((x: any): x is ArrayLike => x && typeof x.length === 'number' && typeof x !== 'function');", "import { isFunction } from \"./isFunction\";\n\n/**\n * Tests to see if the object is \"thennable\".\n * @param value the object to test\n */\nexport function isPromise(value: any): value is PromiseLike {\n return isFunction(value?.then);\n}\n", "import { InteropObservable } from '../types';\nimport { observable as Symbol_observable } from '../symbol/observable';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being Observable (but not necessary an Rx Observable) */\nexport function isInteropObservable(input: any): input is InteropObservable {\n return isFunction(input[Symbol_observable]);\n}\n", "import { isFunction } from './isFunction';\n\nexport function isAsyncIterable(obj: any): obj is AsyncIterable {\n return Symbol.asyncIterator && isFunction(obj?.[Symbol.asyncIterator]);\n}\n", "/**\n * Creates the TypeError to throw if an invalid object is passed to `from` or `scheduled`.\n * @param input The object that was passed.\n */\nexport function createInvalidObservableTypeError(input: any) {\n // TODO: We should create error codes that can be looked up, so this can be less verbose.\n return new TypeError(\n `You provided ${\n input !== null && typeof input === 'object' ? 'an invalid object' : `'${input}'`\n } where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.`\n );\n}\n", "export function getSymbolIterator(): symbol {\n if (typeof Symbol !== 'function' || !Symbol.iterator) {\n return '@@iterator' as any;\n }\n\n return Symbol.iterator;\n}\n\nexport const iterator = getSymbolIterator();\n", "import { iterator as Symbol_iterator } from '../symbol/iterator';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being an Iterable */\nexport function isIterable(input: any): input is Iterable {\n return isFunction(input?.[Symbol_iterator]);\n}\n", "import { ReadableStreamLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport async function* readableStreamLikeToAsyncGenerator(readableStream: ReadableStreamLike): AsyncGenerator {\n const reader = readableStream.getReader();\n try {\n while (true) {\n const { value, done } = await reader.read();\n if (done) {\n return;\n }\n yield value!;\n }\n } finally {\n reader.releaseLock();\n }\n}\n\nexport function isReadableStreamLike(obj: any): obj is ReadableStreamLike {\n // We don't want to use instanceof checks because they would return\n // false for instances from another Realm, like an + + +
+

Two simple ways of putting clips together are to concatenate them (to play them one after the other in a single long clip) or to composite them (to play them side by side in a single larger clip).

+

Concatenating clips

+

Concatenating means playing the clips one after the other in a single long clip. The function concatenate_videoclips takes a list of clips and returns a new clip that is the concatenation of all the clips in the list. +Concatenation is done with the function concatenate_videoclips:

+
1
+2
+3
+4
+5
from vidiopy import VideoFileClip, ImageClip, concatenate_videoclips
+clip1 = VideoFileClip("video.mp4").subclip(0,5)
+clip2 = ImageClip("image.jpg").set_duration(5)
+f_clip = concatenate_videoclips([clip1,clip2], fps=24, over_scale=True)
+f_clip.write_videofile("output.mp4")
+
+

The f_clip is a clip that plays the clips 1, and 2 one after the other. Note that the clips do not need to be the same size. If they aren't they will all appear centered in a clip large enough to contain the biggest of them, with optionally a color of your choosing to fill the borders. You have many other options there (see the doc of the function).

+

Compositing Clip

+

Compositing is done with the function composite_videoclips:

+
video = CompositeVideoClip([clip1,clip2,clip3])
+
+

Now video plays clip1, and clip2 on top of clip1, and clip3 on top of clip1, and clip2. For instance, if clip2 and clip3 have the same size as clip1, then only clip3, which is on top, will be visible in the video… unless clip3 and clip2 have masks which hide parts of them. Note that by default the composition has the size of the largest clip, or of the first clip if bg_clip=True.

+

Starting and stopping times

+

In a CompositionClip, all the clips start to play at a time that is specified by the clip.start attribute. You can set this starting time as follows:

+

clip1 = clip1.with_start(5) # start after 5 seconds +So for instance your composition will look like

+
1
+2
+3
video = CompositeVideoClip([clip1, # starts at t=0
+                            clip2.with_start(5), # start at t=5s
+                            clip3.with_start(9)]) # start at t=9s
+
+

In the example above, maybe clip2 will start before clip1 is over.

+

Positioning clips

+

If clip2 and clip3 are smaller than clip1, you can decide where they will appear in the composition by setting their position. Here we indicate the coordinates of the top-left pixel of the clips:

+
1
+2
+3
video = CompositeVideoClip([clip1,
+                           clip2.with_position((45,150)),
+                           clip3.with_position((90,100))])
+
+

There are many ways to specify the position:

+
 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+10
+11
+12
+13
+14
+15
clip2.with_position((45,150)) # x=45, y=150 , in pixels
+
+clip2.with_position("center") # automatically centered
+
+# clip2 is horizontally centered, and at the top of the picture
+clip2.with_position(("center","top"))
+
+# clip2 is vertically centered, at the left of the picture
+clip2.with_position(("left","center"))
+
+# clip2 is at 40% of the width, 70% of the height of the screen:
+clip2.with_position((0.4,0.7), relative=True)
+
+# clip2's position is horizontally centered, and moving down!
+clip2.with_position(lambda t: ('center', 50+t) )
+
+

When indicating the position keep in mind that the y coordinate has its zero at the top of the picture:

+
+ videoWH +
+ +

Compositing audio clips

+

When you mix video clips together, VidioPy will automatically compose their respective audio tracks to form the audio track of the final clip, so you don’t need to worry about compositing these tracks yourself.

+

If you want to make a custom audio track from several audio sources: audio clips can be mixed together with CompositeAudioClip and concatenate_audioclips:

+
1
+2
+3
+4
+5
+6
from vidiopy import *
+# ... make some audio clips aclip1, aclip2, aclip3
+concat = concatenate_audioclips([aclip1, aclip2, aclip3])
+compo = CompositeAudioClip([aclip1.multiply_volume(1.2),
+                            aclip2.with_start(5), # start at t=5s
+                            aclip3.with_start(9)])
+
+ + + + + + + + + + + +

Comments

+ + + + + + + + + + + + + + + + + +
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/getting_started/quick_presentation/index.html b/getting_started/quick_presentation/index.html new file mode 100644 index 0000000..626cb0d --- /dev/null +++ b/getting_started/quick_presentation/index.html @@ -0,0 +1,970 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Quick Presentation - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

Getting started to use VidioPy

+

Advantages and limitations

+

VidioPy has been developed with the following goals in mind:

+

Advantages:

+
    +
  • Simple syntax for cutting, concatenations, title insertions, video compositing, video processing, and creation of custom effects.
  • +
  • Same syntax for all operating systems (Linux, MacOS, Windows).
  • +
  • Flexible : You have total control over the frames of the video and audio, and creating your own effects is easy as Py.
  • +
  • Fast : you can batch operations as much as you want, backend in ffmpeg, pillow, numpy, etc. for speed.
  • +
  • Supports most video formats and codecs. & Question Support.
  • +
+

limitations:

+
    +
  • still in development.
  • +
  • less documentation & Features.
  • +
+

How Vidiopy works

+

Vidiopy uses the ffmpeg (1) library to read and write video files. The processing of the different media is done using modules like Numpy, OpenCV, Pillow, etc.

+
    +
  1. ffmpeg is a tool for handling multimedia files. It is used for reading and writing video files, and for converting between different video and audio formats.
  2. +
+
flowchart LR
+    subgraph clips
+        video(film reel)
+        audio(sound wave)
+        pictures(image)
+    end
+    Processing[numpy, opencv, pillow, etc]
+    subgraph processing
+        Processing
+    end
+    subgraph output
+        Output_Image(Image Sequence, Image File)
+        Output_Video(Video File)
+        Output_Audio(Audio File)
+    end
+    video -->|ffmpeg| processing
+    audio -->|ffmpeg| processing
+    pictures -->|ffmpeg or pillow| processing
+    processing -->|ffmpeg| Output_Video
+    processing -->|ffmpeg| Output_Audio
+    processing -->|ffmpeg or pillow| Output_Image
+

Example code

+
 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+10
+11
+12
+13
+14
+15
+16
+17
+18
+19
+20
+21
+22
+23
+24
+25
from vidiopy import VideoFileClip, TextClip
+
+# Load myHolidays.mp4 and trimming it to 10 seconds. 50s to 60s.
+clip = VideoFileClip("myHolidays.mp4").subclip(50,60)
+
+# Generate a text clip. You can customize the font, color, etc.
+txt_clip = TextClip("My Holidays 2013", font_size=70, txt_color='white', bg_color='gray', font=r'path/to/font.ttf')
+txt_clip = txt_clip.set_pos('center', 'right').set_duration(10)
+
+# Overlay the text clip on the first video clip
+video = CompositeVideoClip([clip, txt_clip])
+
+# Write the result to a video file in any format
+video.write_videofile("myHolidays_edited.webm")
+video.write_videofile("myHolidays_edited.mp4")
+video.write_videofile("myHolidays_edited.avi")
+video.write_videofile("myHolidays_edited.mkv")
+
+# Writing single frame
+video.save_frame("frame.png", t=0.5) # t= time in seconds
+
+# Writing Image Sequence
+video.write_image_sequence("image%03d.png", fps=24) # %03d are placeholders for the numbers 001, 002, 003, etc. fps = frames per second
+video.write_image_sequence("image%03d.jpg", fps=24) # %03d are placeholders for the numbers 001, 002, 003, etc. fps = frames per second
+video.write_image_sequence("image%03d.bmp", fps=24) # %03d are placeholders for the numbers 001, 002, 003, etc. fps = frames per second
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/getting_started/read&write/index.html b/getting_started/read&write/index.html new file mode 100644 index 0000000..ebe944c --- /dev/null +++ b/getting_started/read&write/index.html @@ -0,0 +1,929 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Read & Write - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

Reading/Writing Video & Audio

+

Reading Video from file

+

The first step of video editing is reading videos from a file. This can be done using the vidiopy.VideoFileClip class. This class takes the path of the video file as input and returns a video object which inherits from the VideoClip class.

+
1
+2
+3
import vidiopy
+video = vidiopy.VideoFileClip("path/to/video.extension") # you can perform the operations on the video object
+video_without_audio = vidiopy.VideoFileClip("path/to/video.extension", audio=False) # defaults to `audio=True`
+
+

If the video does not have audio, a silent audio clip will be created.

+

Writing Video to file

+

To write the video we can use the write_videofile function inside the VideoClip class. +Other clip types inherit it from VideoClip.

+
1
+2
+3
import vidiopy
+video = vidiopy.VideoFileClip("path/to/video.extension")
+video.write_videofile("path/to/output/video.extension", fps=30) # fps is optional it will default use the fps of the video if it is set
+
+

Reading Audio from file

+

To read the audio from a file we can use the AudioFileClip class. This class takes the path of the audio file as input and returns an audio clip which inherits from the AudioClip class.

+
1
+2
import vidiopy
+audio = vidiopy.AudioFileClip("path/to/audio.extension")
+
+

Writing Audio to file

+

To write the audio we can use the write_audiofile function inside the AudioClip class. +Other clip types inherit it from AudioClip.

+
1
+2
+3
import vidiopy
+audio = vidiopy.AudioFileClip("path/to/audio.extension")
+audio.write_audiofile("path/to/output/audio.extension")
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/index.html b/index.html new file mode 100644 index 0000000..7ea43d0 --- /dev/null +++ b/index.html @@ -0,0 +1,657 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

VidioPy

+

VidioPy is a Python library for video processing. It provides a simple API for common video processing tasks such as reading and writing video files, transforming video clips, performing basic operations like cuts, concatenations, and title insertions. It also supports video compositing (a.k.a. non-linear editing), advanced video effects, and video processing.

+

Overview

+

1.Download & Install

+

1.Getting Started

+

1.Reference Manual

+ + +

Contributing

+

VidioPy is an open source project originally developed by Soham & released under the MIT license. It is currently maintained by Soham. The code is hosted on Github, where you can push improvements, report bugs and ask for help. +We welcome all kinds of contributions, from code to documentation, to bug reports. Please read the contribution guidelines first.

+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/media/vidiopy_logo.png b/media/vidiopy_logo.png new file mode 100644 index 0000000..472ee12 Binary files /dev/null and b/media/vidiopy_logo.png differ diff --git a/more/CONTRIBUTING/index.html b/more/CONTRIBUTING/index.html new file mode 100644 index 0000000..9399bca --- /dev/null +++ b/more/CONTRIBUTING/index.html @@ -0,0 +1,776 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Contributing - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + + + + + + + + + + + + + +

VidioPy's Contribution Guidelines

+

Communication on GitHub

+
    +
  • Keep discussions on GitHub issues and pull requests focused and concise. Remember that each comment triggers a notification for multiple people.
  • +
  • Before making significant changes to the core codebase, discuss them with the team.
  • +
+

Setting Up Your Development Environment

+
    +
  • Fork the official VidioPy repository to your own GitHub account.
  • +
  • Clone the forked repository to your local machine.
  • +
  • Create and activate a Python virtual environment to isolate the project dependencies.
  • +
  • Navigate to the cloned directory and run pip install -e . to install the project dependencies.
  • +
  • Regularly sync your local repository with the main repository to stay up-to-date with the latest changes.
  • +
+

Coding Standards and Code Quality

+
    +
  • Adhere to the PEP8 coding conventions for Python.
  • +
  • Use comments judiciously and only when necessary. Aim to write self-explanatory code.
  • +
  • Choose clear and descriptive names for variables, functions, and classes.
  • +
  • Document new features or bug fixes with docstring. Update the documentation in the docs/markdown/ directory as needed.
  • +
  • Use Prettier to maintain consistent code formatting.
  • +
  • Review your code in PyCharm or VSCode to catch potential edge cases.
  • +
  • When adding new functions or features, update the corresponding unit tests or mention the need for new tests in your pull request.
  • +
  • read the Code Style Guide
  • +
+

Submitting Pull Requests

+
    +
  • You can submit a pull request (PR) even if your work is still in progress; it doesn't have to be fully finished.
  • +
  • Before submitting your PR, run the test suite using pytest to ensure your changes haven't broken anything.
  • +
  • Provide a clear and detailed description of your changes when submitting your PR. This will help the reviewers understand your work and expedite the review process.
  • +
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/more/code style guide/index.html b/more/code style guide/index.html new file mode 100644 index 0000000..c38560b --- /dev/null +++ b/more/code style guide/index.html @@ -0,0 +1,1020 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + Style Guide - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+
+ + + + + + + + + + + + + + + + + + + + +

code style guide

+

functions and methods docstrings

+

function and method docstring template

+
 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+10
+11
+12
+13
+14
+15
+16
+17
+18
+19
+20
+21
+22
+23
+24
+25
+26
+27
+28
+29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
+41
+42
+43
+44
+45
+46
+47
+48
+49
+50
+51
+52
+53
+54
+55
+56
+57
+58
+59
"""\
+A Brief Description of the Function or Method
+
+#### Parameters:
+    - `param1` `type`: -
+        The first Doc.
+    - `param2` `type[type, type]`: -
+        The second Doc.
+    - `param3` `(type, optional, ...)`: -
+        The third Doc.
+    - `param4` `(type, optional, default=None)`: -
+        The fourth Doc.
+    - `*param5` `(type, optional, ...)`: -
+        The fifth Doc.
+    - `**param6` `(type, optional, ...)`: -
+        The sixth Doc.
+
+#### returns: # if return Multiple things
+    - `int`: - an a xyz.
+    - `float`: - an a abc
+
+#### return: `int` # if return Single thing
+    Doc goes here.
+
+#### return: `None` # if do not return anything
+
+#### raises: # add if needed
+    - `Error`: - if xyz.
+    - `Exception`: - if abc.
+
+#### Note: # add if needed
+    - xyz
+    - More notes.
+
+#### Warning: # add if needed
+    - xyz
+    - More warnings.
+
+#### examples:
+    example 1 :
+
+    \`\`\`python
+    >>> code
+    output
+    \`\`\`
+    example 2 :
+
+    \`\`\`python
+    code # explain
+    \`\`\`
+    - More examples.
+
+#### TODO: # add if needed
+    - xyz
+    - More TODOs.
+
+#### [function reference manual](https://github.com/SohamTilekar/vidiopy/blob/master/docs/...)
+
+"""
+
+

function and method docstring conventions

+
    +
  • Docstrings are always triple quoted strings use """ not '''.
  • +
  • add a blank line after the docstring.
  • +
  • use the #### for the sections.
  • +
  • add as much detail as possible.
  • +
  • add the link to the Function or Method Reference manual.
  • +
+

class docstrings

+

class docstring template

+
 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+10
+11
+12
+13
+14
+15
+16
+17
+18
+19
+20
+21
+22
+23
+24
+25
+26
+27
+28
+29
+30
+31
+32
+33
+34
+35
+36
"""\
+A Brief Description of the Class
+
+properties:
+    - `property1`: - a short 1 line description of the property.
+    - `property2`: - a short 1 line description of the property.
+
+methods:
+    - `method1`: - a short 1 line description of the method.
+    - `method2`: - a short 1 line description of the method.
+
+abstract methods:
+    - `method1`: - a short 1 line description of the method.
+    - `method2`: - a short 1 line description of the method.
+
+#### Note: # add if needed
+    - xyz
+    - More notes.
+
+#### Warning: # add if needed
+    - xyz
+    - More warnings.
+
+#### examples:
+    example 1 :
+    \`\`\`python
+    >>> code
+    output
+    \`\`\`
+
+    example 2 :
+    \`\`\`python
+    code # explain
+    \`\`\`
+    - More examples.
+"""
+
+

class docstring conventions

+
    +
  • Docstrings are always triple quoted strings use """ not '''.
  • +
  • add a blank line after the docstring.
  • +
  • use the #### for the sections.
  • +
  • add as much detail as possible.
  • +
+

Comments

+
    +
  • Use as few comments as possible.
  • +
  • Use comments where code is not self explanatory or weird.
  • +
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/reference_manual/clips/audio_clips/audioarrayclip/index.html b/reference_manual/clips/audio_clips/audioarrayclip/index.html new file mode 100644 index 0000000..b6a3704 --- /dev/null +++ b/reference_manual/clips/audio_clips/audioarrayclip/index.html @@ -0,0 +1,964 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + AudioArrayClip - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

AudioArrayClip

+
+

class vidiopy.AudioArrayClip

+
+
+
+

Bases: vidiopy.AudioClip

+

AudioArrayClip is a class that represents an audio clip from an array. It extends the AudioClip class.

+
+
Parameters:
+
+
    +
  • audio_data: np.ndarray: The audio data.
  • +
  • fps: int: The sample rate of the audio clip.
  • +
  • duration: int | float: The duration of the audio clip.
  • +
+
+
Example:
+
+

```python +import numpy as np +import vidiopy

+

audio_data = np.random.uniform(-1, 1, 44100 * 3) # 3 seconds of random audio +audio_clip = vidiopy.AudioArrayClip(audio_data, fps=44100) +```

+
+
+
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/reference_manual/clips/audio_clips/audioclip/index.html b/reference_manual/clips/audio_clips/audioclip/index.html new file mode 100644 index 0000000..d55eed2 --- /dev/null +++ b/reference_manual/clips/audio_clips/audioclip/index.html @@ -0,0 +1,1313 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + AudioClip - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

AudioClip

+
+

class vidiopy.AudioClip

+
+
+
+

Bases: vidiopy.Clip

+

The AudioClip class represents an audio clip. It is a subclass of the Clip class.

+
+
Parameters:
+
+
    +
  • duration (int or float, optional): The duration of the audio clip. Defaults to None.
  • +
  • fps (int, optional): Frames per second of the audio clip. Defaults to None.
  • +
+
+
Attributes:
+
+
    +
  • fps: int | None: The frames per second of the audio clip. Defaults to fps Parameter.
  • +
  • _original_dur: int | float | None: The original duration of the audio clip. Defaults to duration Parameter.
  • +
  • _audio_data: np.ndarray | None: The audio data of the clip. Defaults to None.
  • +
  • channels: int | None: The number of audio channels. Defaults to None.
  • +
  • _st: int | float: The start time of the audio clip. Defaults to 0.0.
  • +
  • _ed: int | float | None: The end time of the audio clip. Defaults to duration Parameter.
  • +
+
+
Properties:
+
+
+

audio_data: np.ndarray

+
+
+
+

This property gets the audio data. If the audio data is not set, it raises a ValueError.

+
+
Returns:
+
np.ndarray: The audio data.
+
Raises:
+
ValueError: If the audio data is not set.
+
+

Example: +: +

>>> clip = AudioClip()
+>>> clip.audio_data = np.array([1, 2, 3])
+>>> print(clip.audio_data)
+array([1, 2, 3])
+

+
+
+
+

duration: int | float

+
+
+
+

This property gets the duration of the audio clip. The duration is represented in seconds and can be an integer, a float, or None if the duration is not set.

+
+
Note:
+
You Can't Set the duration of the audio clip it is not allowed to change directly.
+
+

Raises: + AttributeError: Always raises an AttributeError if you try to set duration.

+
+
Returns:
+
int | float: The duration of the audio clip.
+
+

Example: +: +

>>> clip = AudioClip(duration=10)
+>>> print(clip.duration)
+10
+

+
+
+
+

start: int | float

+
+
+
+

This property gets the start time of the audio clip. The start time is represented in seconds and can be an integer or a float.

+
+
Returns:
+
int | float: The start time of the audio clip.
+
+

Example: +: +

>>> clip = AudioClip()
+>>> print(clip.start)
+0.0
+>>> clip.start = 5
+>>> print(clip.start)
+5
+

+
+
+
+

end: int | float | None

+
+
+
+

This property gets the end time of the audio clip. The end time is represented in seconds and can be an integer, a float, or None if the end time is not set.

+
+
Returns:
+
int | float | None: The end time of the audio clip.
+
+

Example: +: +

>>> clip = AudioClip(duration=10)
+>>> print(clip.end)
+10
+>>> clip.end = 5
+>>> print(clip.end)
+5
+

+
+
+
+
Methods:
+
+
+

def set_data(self, audio_data: np.ndarray) -> Self:

+
+
+
+

This method sets the audio data and returns the instance of the class.

+
+
Args:
+
audio_data (np.ndarray): The audio data to set.
+
Returns:
+
AudioClip: The instance of the class.
+
+

Example: +: +

    >>> clip = AudioClip()
+    >>> clip.set_data(np.array([1, 2, 3]))
+    >>> print(clip.audio_data)
+    array([1, 2, 3])
+

+
+
+
+

def set_fps(self, fps: int | None) -> Self:

+
+
+
+

This method sets the frames per second (fps) for the audio clip and returns the instance of the class.

+
+
Args:
+
fps: int | None: The frames per second to set. If None, the fps will be unset.
+
Returns:
+
AudioClip: Self The Instance of the class.
+
+

Example: +: +

>>> clip = AudioClip()
+>>> clip.set_fps(30)
+>>> print(clip.fps)
+30
+

+
+
+
+

def set_start(self, start: int | float) -> Self:

+
+
+
+

This method sets the start time of the audio clip and returns the instance of the class. +The start time is represented in seconds and can be an integer or a float.

+
+
Args:
+
start: int | float: The start time to set in seconds.
+
Returns:
+
AudioClip: The instance of the class with the updated start time.
+
+

Example: +: +

>>> clip = AudioClip()
+>>> clip.set_start(5)
+>>> print(clip.start)
+5
+

+
+
+
+

def set_end(self, end: int | float | None) -> Self:

+
+
+
+

This method sets the end time of the audio clip and returns the instance of the class. +The end time is represented in seconds and can be an integer, a float, or None if the end time is not to be set.

+
+
Args:
+
end: int | float | None: The end time to set in seconds.
+
Returns:
+
AudioClip: The instance of the class with the updated end time.
+
+

Example: +: +

>>> clip = AudioClip()
+>>> clip.set_end(10)
+>>> print(clip.end)
+10
+

+
+
+
+

def get_frame_at_t(self, t: int | float) -> np.ndarray:

+
+
+
+

This method gets the audio frame at a specific time t. The time t is represented in seconds and can be an integer or a float. +It calculates the frame index using the duration, total frames, and time t, and returns the audio data at that frame index.

+
+
Args:
+
t: int | float: The time in seconds at which to get the audio frame.
+
Returns:
+
np.ndarray: The audio data at the specified time.
+
Raises:
+
ValueError: If frames per second (fps) is not set, audio data is not set, or original duration is not set.
+
+
+
+
+

def iterate_frames_at_fps(self, fps: int | float | None = None) -> Generator[np.ndarray, None, None]:

+
+
+
+

This method generates audio frames at a specific frames per second (fps) rate. If no fps is provided, it uses the fps set in the AudioClip instance. +It calculates the original fps using the duration and total frames, then generates frames at the specified fps rate.

+
+
Args:
+
fps (int | float | None, optional): The frames per second rate at which to generate frames. If not provided, the fps set in the AudioClip instance is used.
+
Yields:
+
np.ndarray: The audio data at each frame.
+
Raises:
+
ValueError: If frames per second (fps) is not set, audio data is not set, or original duration is not set.
+
+
+
+
+

def iterate_all_frames(self) -> Generator[np.ndarray, None, None]:

+
+
+
+

This method generates all audio frames in the AudioClip instance. It iterates over each frame in the audio data and yields it.

+
+
Yields:
+
np.ndarray: The audio data at each frame.
+
Raises:
+
ValueError: If audio data is not set.
+
+
+
+
+

def fl_frame_transform(self, func, *args, **kwargs) -> Self:

+
+
+
+

This method applies a function to each frame of the audio data. The function should take a frame (an ndarray of channel data) as its first argument, +followed by any number of additional positional and keyword arguments.

+
+
Args:
+
+
    +
  • func (Callable): The function to apply to each frame. It should take a frame (an ndarray of channel data) as its first argument.
  • +
  • *args: Additional positional arguments to pass to the function.
  • +
  • **kwargs: Additional keyword arguments to pass to the function.
  • +
+
+
Returns:
+
AudioClip: The instance of the class with the transformed audio data.
+
Raises:
+
ValueError: If audio data is not set.
+
+
+
+
+

def fl_clip_transform(self, func, *args, **kwargs) -> Self:

+
+
+
+

This method applies a function to the entire audio data. The function should take the AudioClip instance as its first argument, +followed by any number of additional positional and keyword arguments.

+
+
Args:
+
+
    +
  • func (Callable): The function to apply to the audio data. It should take the AudioClip instance as its first argument.
  • +
  • *args: Additional positional arguments to pass to the function.
  • +
  • **kwargs: Additional keyword arguments to pass to the function.
  • +
+
+
Returns:
+
AudioClip: The instance of the class with the transformed audio data.
+
Raises:
+
ValueError: If audio data is not set.
+
+
+
+
+

def fl_time_transform(self, func: Callable[[int | float], int | float]) -> Self:

+
+
+
+

This method applies a time transformation function to the get_frame_at_t method of the AudioClip instance. +The transformation function should take a time (an integer or a float) as its argument and return a transformed time.

+

The get_frame_at_t method is replaced with a new method that applies the transformation function to its argument before calling the original method.

+
+
Args:
+
func (Callable[[int | float], int | float]): The time transformation function to apply. It should take a time (an integer or a float) as its argument and return a transformed time.
+
Returns:
+
AudioClip: The instance of the class with the transformed get_frame_at_t method.
+
Raises:
+
ValueError: If the get_frame_at_t method is not set.
+
+
+
+
+

def sub_clip_copy(self, start: float | int | None = None, end: float | int | None = None) -> Self:

+
+
+
+

This method creates a copy of the AudioClip instance and then creates a subclip from the audio clip starting from start to end in the copied instance. +If start or end is not provided, it uses the start or end time set in the AudioClip instance. If neither is set, it uses 0 for start and the duration for end.

+

It calculates the original frames per second (fps) using the duration and total frames, then calculates the start and end frame indices using the original fps. +It then updates the audio data, original duration, end time, and start time of the copied AudioClip instance.

+
+
Args:
+
+
    +
  • start (float | int | None, optional): The start time of the subclip in seconds. If not provided, the start time set in the AudioClip instance is used. Defaults to None.
  • +
  • end (float | int | None, optional): The end time of the subclip in seconds. If not provided, the end time set in the AudioClip instance is used. Defaults to None.
  • +
+
+
Returns:
+
AudioClip: A copy of the instance of the class with the updated audio data, original duration, end time, and start time.
+
Raises:
+
ValueError: If audio data is not set, original duration is not set, or end time is greater than the original duration.
+
+
+
+
+

def copy(self) -> Self:

+
+
+
This method creates a deep copy of the AudioClip instance and returns it. It uses the copy_ function, which should be a deep copy function like copy.deepcopy in Python's standard library.
+
Returns:
+
AudioClip: A deep copy of the instance of the class.
+
Raises:
+
ValueError: If the copy_ function is not set or does not correctly create a deep copy.
+
> def write_audiofile(self, path: str, fps: int | None = None, overwrite=True, show_log=False, **kwargs) -> None:
+
+

This method writes the audio data to an audio file at the specified path. +It uses the frames per second (fps) if provided, otherwise it uses the fps set in the AudioClip instance. +It raises a ValueError if fps is not set in either way. +It also raises a ValueError if audio data, original duration, or channels are not set.

+

It creates a temporary audio data array by getting the frame at each time step from 0 to the end or duration with a step of 1/fps. +It then writes the temporary audio data to the audio file using the ffmpegio.audio.write function.

+
+
Args:
+
+
    +
  • path (str): The path to write the audio file to.
  • +
  • fps (int | None, optional): The frames per second to use. If not provided, the fps set in the AudioClip instance is used. Defaults to None.
  • +
  • overwrite (bool, optional): Whether to overwrite the audio file if it already exists. Defaults to True.
  • +
  • show_log (bool, optional): Whether to show the log of the ffmpegio.audio.write function. Defaults to False.
  • +
  • **kwargs: Additional keyword arguments to pass to the ffmpegio.audio.write function.
  • +
+
+
Raises:
+
ValueError: If fps is not set, audio data is not set, original duration is not set, or channels are not set.
+
+
+
+
+
+
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/reference_manual/clips/audio_clips/audiofileclip/index.html b/reference_manual/clips/audio_clips/audiofileclip/index.html new file mode 100644 index 0000000..80bf457 --- /dev/null +++ b/reference_manual/clips/audio_clips/audiofileclip/index.html @@ -0,0 +1,960 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + AudioFileClip - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

AudioFileClip

+
+

class vidiopy.AudioFileClip

+
+
+
+

Bases: vidiopy.SilenceClip

+

AudioFileClip is a class that represents an audio file. It extends the SilenceClip class.

+
+
Parameters:
+
+
    +
  • path: str | pathlib.Path: The path to the audio file.
  • +
  • duration (int | float | None, optional): The duration of the audio file. If not provided, it will be calculated from the audio file.
  • +
+
+
Raises:
+
+
    +
  • ValueError: If the audio file is empty and duration is not provided.
  • +
+
+
+
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/reference_manual/clips/audio_clips/mixingaudio/index.html b/reference_manual/clips/audio_clips/mixingaudio/index.html new file mode 100644 index 0000000..461a068 --- /dev/null +++ b/reference_manual/clips/audio_clips/mixingaudio/index.html @@ -0,0 +1,1004 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Mixing AudioClip - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

Concatenating Audio Clips

+
+

def concatenate_audioclips(clips: list[AudioClip], fps: int | None = 44100) -> AudioClip | AudioArrayClip:

+
+
+
+

Concatenates multiple audio clips into a single audio clip.

+
+
Parameters:
+
+
    +
  • clips: list[AudioClip]: A list of AudioClip objects to be concatenated.
  • +
  • fps (int, optional): The frames per second (fps) for the output AudioClip. If not provided, it defaults to 44100, or the maximum fps value found in the input clips.
  • +
+
+
Returns:
+
+

AudioClip | AudioArrayClip: The concatenated AudioClip. If the input clips have different channels, the output AudioClip will have the maximum number of channels found in the input clips, and the missing channels in the other clips will be filled with the mean value of their existing channels.

+
+
Raises:
+
+

ValueError: If no clips are provided, or if no fps value is found or set, or if a clip's channels are not set.

+
+
Note:
+
+
    +
  • The duration of the output AudioClip is the sum of the durations of the input clips.
  • +
  • If a clip's end time is set, it is used to calculate its duration; otherwise, its duration attribute is used.
  • +
  • If neither is set, a ValueError is raised.
  • +
+
+
+
+
+

Compositing Audio Clips

+
+

def composite_audioclips(clips: list[AudioClip], fps: int | None = 44100, use_bg_audio: bool = False) -> AudioArrayClip:

+
+
+
+

Composites multiple audio clips into a single audio clip.

+
+
Parameters:
+
+
    +
  • clips: list[AudioClip]: A list of AudioClip objects to be composited.
  • +
  • fps (int, optional): The frames per second (fps) for the output AudioClip. If not provided, it defaults to the maximum fps value found in the input clips.
  • +
  • use_bg_audio (bool, optional): If True, the first clip in the list is used as the background audio. The remaining clips are overlaid on top of this background audio. If False, a SilenceClip of the maximum duration found in the clips is used as the background audio.
  • +
+
+
Returns:
+
+

AudioArrayClip: The composited AudioClip. The output AudioClip will have the maximum number of channels found in the input clips, and the missing channels in the other clips will be filled with the mean value of their existing channels.

+
+
Raises:
+
+

ValueError: If no clips are provided, or if no fps value is found or set, or if a clip's channels are not set, or if no duration is found or set in the clips when use_bg_audio is False.

+
+
Note:
+
+
    +
  • The duration of the output AudioClip is the duration of the background audio.
  • +
  • If a clip's end time is set, it is used to calculate its duration; otherwise, its duration attribute is used.
  • +
  • If neither is set, a ValueError is raised.
  • +
+
+
+
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/reference_manual/clips/audio_clips/silenceclip/index.html b/reference_manual/clips/audio_clips/silenceclip/index.html new file mode 100644 index 0000000..c491050 --- /dev/null +++ b/reference_manual/clips/audio_clips/silenceclip/index.html @@ -0,0 +1,956 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + SilenceClip - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

Silence Clip

+
+

class vidiopy.SilenceClip

+
+
+
+

Bases: vidiopy.AudioClip

+

SilenceClip is a subclass of AudioClip that represents a silent audio clip.

+

It inherits from AudioClip therefore it has all the methods and attributes of AudioClip.

+
+
Parameters:
+
+
    +
  • duration: int | float: The duration of the audio clip.
  • +
  • fps (int, optional): The frames per second of the audio clip. Default is 44100.
  • +
  • channels (int, optional): The number of audio channels. Default is 1.
  • +
+
+
+
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/reference_manual/clips/clip/index.html b/reference_manual/clips/clip/index.html new file mode 100644 index 0000000..9b290db --- /dev/null +++ b/reference_manual/clips/clip/index.html @@ -0,0 +1,899 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Clip - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

Clip

+
+

class vidiopy.Clip.Clip

+
+
+
+

Bases: object

+

A Clip is the base class for all the clips (VideoClip and AudioClip).

+
+

fx(func, *args, **kwargs)

+
+
+
+

Apply a function to the current instance and return the result.

+

This method allows for the application of any callable to the current instance of the class. +The callable should take the instance as its first argument, followed by any number of positional and keyword arguments.

+
+
Parameters:
+
+
    +
  • func: (Callable[..., Self]): The function to apply. This should take the instance as its first argument.
  • +
  • *args: Variable length argument list for the function.
  • +
  • **kwargs: Arbitrary keyword arguments for the function.
  • +
+
+
Returns:
+
+
    +
  • Self: The result of applying the function to the instance.
  • +
+
+
Example:
+
>>> clip = Clip()
+>>> def do(instance):
+...     # Do something with instance.
+...     return instance
+...
+>>> new_clip = clip.fx(do)
+
+
+
+
+
+

copy()

+
+
+
+

Creates a deep copy of the current Clip object.

+

This method creates a new instance of the Clip object, copying all the attributes of the current object into the new one. +If the current object has an 'audio' attribute, it also creates a deep copy of this 'audio' object and assigns it to the 'audio' attribute of the new Clip object.

+
+
Returns:
+
Clip: A new Clip object that is a deep copy of the current object.
+
+
+
+
+

close()

+
+
+
Release any resources that are in use.
+
+
+

__enter__()

+
+
+
Enter the context manager.
+
+
+

__exit__()

+
+
+
Exit the context manager.
+
+
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/reference_manual/clips/video_clips/imageclips/index.html b/reference_manual/clips/video_clips/imageclips/index.html new file mode 100644 index 0000000..66ed873 --- /dev/null +++ b/reference_manual/clips/video_clips/imageclips/index.html @@ -0,0 +1,1549 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ImageClip - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

ImageClip

+
+

class vidiopy.ImageClip(image: str | Path | Image.Image | np.ndarray | None = None, fps: int | float | None = None, duration: int | float | None = None)

+
+
+
+

Bases: vidiopy.VideoClip

+

All Methods and properties of the VideoClip class are available.

+

A class representing a video clip generated from a single image.

+
+
Parameters:
+
+
    +
  • image: str | Path | Image.Image | np.ndarray | None: The image to use for the video clip. If None, an empty video clip is created.
  • +
  • fps: int | float | None: The frames per second of the video clip. If None, the fps is set to 30.
  • +
  • duration: int | float | None: The duration of the video clip in seconds. If None, the duration is set to 1.
  • +
+
+
Attributes:
+
+
    +
  • image: Image.Image: The image used for the video clip.
  • +
  • Other attributes are inherited from the VideoClip class.
  • +
+
+
Methods:
+
+
+

_import_image(self, image) -> Image.Image:

+
+
+
+

Import the image from various sources.

+

Not intended for external use.

+
+
Parameters:
+
image (str | Path | Image.Image | np.ndarray): Input image data.
+
Returns:
+
Image.Image: The imported image data.
+
+

This is a private method and not intended for external use.

+
+
+
+

You can use the set_duration() method & the duration property to change _dur.

+

fl_frame_transform(self, func, *args, **kwargs) -> Self:

+
+
+
+

Apply a frame transformation function to the image.

+
+
Parameters:
+
func (Callable): The frame transformation function.
+
*args: Additional positional arguments for the function.
+
**kwargs: Additional keyword arguments for the function.
+
Returns:
+
ImageClip: A new ImageClip instance with the transformed image.
+
Note:
+
This method modifies the current ImageClip instance in-place.
+
Example Usage:
+
image_clip = ImageClip(image_path, fps=30, duration=5.0)
+transformed_clip = image_clip.fl_frame_transform(resize, width=640, height=480)
+
+
+
+
+
+

fl_frame_transform(self, func, *args, **kwargs) -> Self:

+
+
+
+

Apply a frame transformation function to the image.

+
+
Parameters:
+
func (Callable): The frame transformation function.
+
*args: Additional positional arguments for the function.
+
**kwargs: Additional keyword arguments for the function.
+
Returns:
+
ImageClip: A new ImageClip instance with the transformed image.
+
Note:
+
This method modifies the current ImageClip instance in-place.
+
Example Usage:
+
image_clip = ImageClip(image_path, fps=30, duration=5.0)
+transformed_clip = image_clip.fl_frame_transform(resize, width=640, height=480)
+
+
+
+
+
+

fl_clip_transform(self, func, *args, **kwargs) -> Self:

+
+
+
+

Raise a ValueError indicating that fl_clip is not applicable for ImageClip.

+

The Clip should be converted to VideoClip using to_video_clip method first.

+
+
Parameters:
+
func: Unused.
+
*args: Unused.
+
**kwargs: Unused.
+
Returns:
+
ImageClip: The current ImageClip instance.
+
Raises:
+
ValueError: This method is not applicable for ImageClip.
+
Example Usage:
+
image_clip = ImageClip(image_path, fps=30, duration=5.0)
+image_clip.fl_clip(some_function)  # Raises ValueError
+
+
+
+
+
+

fx(self, func: Callable, *args, **kwargs):

+
+
+
+

Apply a generic function to the ImageClip.

+
+
Parameters:
+
func (Callable): The function to apply.
+
*args: Additional positional arguments for the function.
+
**kwargs: Additional keyword arguments for the function.
+
Returns:
+
ImageClip: The current ImageClip instance.
+
Note:
+
This method modifies the current ImageClip instance in-place.
+
Example Usage:
+
def custom_function(image):
+    # Some custom processing on the image
+    return modified_image
+
+image_clip = ImageClip(image_path, fps=30, duration=5.0)
+image_clip.fx(custom_function, some_arg=42)
+
+
+
+
+
+

sub_fx(self, func, *args, start_t: int | float | None = None, end_t: int | float | None = None, **kwargs) -> Self:

+
+
+
+

Apply a custom function to the Image Clip.

+
+
Note:
+
Before using the sub_fx method, you need to convert the image clip to a video clip using to_video_clip() function.
+
Args:
+
func: The custom function to apply to the Image Clip.
+
*args: Additional positional arguments to pass to the custom function.
+
start_t (int | float | None): The start time of the subclip in seconds. If None, the subclip starts from the beginning.
+
end_t (int | float | None): The end time of the subclip in seconds. If None, the subclip ends at the last frame.
+
**kwargs: Additional keyword arguments to pass to the custom function.
+
Returns:
+
Self: The modified ImageClip instance.
+
Example:
+
# Convert the image clip to a video clip
+video_clip = image_clip.to_video_clip()
+
+# Apply a custom function to the video clip
+modified_clip = video_clip.sub_fx(custom_function, start_t=2, end_t=5)
+
+
Raises:
+
ValueError: If the method is called on an Image Clip instead of a Video Clip.
+
+
+
+
+

sub_clip_copy(self, start: int | float | None = None, end: int | float | None = None) -> Self:

+
+
+
+

Create a copy of the current clip and apply sub-clip operation. +Read more about sub-clip operation in the sub_clip method.

+
+
Args:
+
start (int | float | None): Start time of the sub-clip in seconds. +If None, the sub-clip starts from the beginning of the original clip.
+
end (int | float | None): End time of the sub-clip in seconds. +If None, the sub-clip ends at the end of the original clip.
+
Returns:
+
Self: A new instance of the clip with the sub-clip applied.
+
Example:
+
image_clip = ImageClip(image_path, fps=30, duration=5.0)
+sub_clip = image_clip.sub_clip_copy(start=2, end=5)
+
+
+
+
+
+

sub_clip(self, start: int | float | None = None, end: int | float | None = None) -> Self:

+
+
+
+

Returns a sub-clip of the current clip.

+
+
Args:
+
start (int | float | None, optional): The start time of the sub-clip in seconds. Defaults to None.
+
end (int | float | None, optional): The end time of the sub-clip in seconds. Defaults to None.
+
Returns:
+
Self: The sub-clip.
+
Note:
+
It modifies the current clip in-place. +If both start and end are None, the original clip is returned. +If start is None, it defaults to 0. +If end is None, it defaults to the end time of the original clip.
+
Example:
+
image_clip = ImageClip(image_path, fps=30, duration=5.0)
+image_clip.sub_clip(start=2, end=5)
+
+
+
+
+
+

make_frame_array(self, t):

+
+
+
+

Gives the numpy array representation of the image at a given time.

+
+
Args:
+
t (float): The timestamp of the frame.
+
Returns:
+
numpy.ndarray: The numpy array representation of the image.
+
Raises:
+
ValueError: If the image is not set.
+
+
+
+
+

make_frame_pil(self, t) -> Image.Image:

+
+
+
+

Returns the image frame at a given time.

+
+
Args:
+
t (float): The time at which to retrieve the frame.
+
Returns:
+
PIL.Image.Image: The image frame at the given time.
+
Raises:
+
ValueError: If the image is not set.
+
+
+
+
+

to_video_clip(self, fps=None, duration=None):

+
+
+
+

Convert ImageClip to VideoClip

+

If fps or duration is not provided, it defaults to the corresponding attribute +of the ImageClip instance. If those attributes are not available, a ValueError is raised.

+
+
Parameters:
+
fps (float, optional): Frames per second of the resulting video clip. +If not provided, it defaults to the fps attribute of the ImageClip instance. +If that is also not available, a ValueError is raised.
+
duration (float, optional): Duration of the resulting video clip in seconds. +If not provided, it defaults to the duration attribute of the ImageClip instance. +If that is also not available, a ValueError is raised.
+
Returns:
+
ImageSequenceClip: A VideoClip subclass instance generated from the ImageClip frames.
+
Raises:
+
ValueError: If fps or duration is not provided and the corresponding attribute is not available.
+
Note:
+
The to_video_clip method returns an instance of the ImageSequenceClip class, +which is a subclass of the VideoClip Class.
+
Example Usage:
+
# Example Usage
+image_clip = ImageClip()
+video_clip = image_clip.to_video_clip(fps=24, duration=10.0)
+video_clip.sub_fx(custom_function, start_t=2, end_t=5)
+
+
+
+
+
+
+
+
+

Data2ImageClip

+
+

class vidiopy.Data2ImageClip(data: np.ndarray | Image.Image, fps: int | float | None = None, duration: int | float | None = None)

+
+
+
+

Bases: vidiopy.ImageClip

+

A class representing a video clip generated from raw data (numpy array or PIL Image).

+

It extends the ImageClip class and allows users to create video clips from raw data, supporting either numpy arrays or PIL Images as input.

+
+
Parameters:
+
+
    +
  • data (np.ndarray or PIL Image): The raw data to be converted into a video clip.
  • +
  • fps (int | float | None): Frames per second of the video. If not provided, it will be inherited from the parent class (ImageClip) or set to the default value.
  • +
  • duration (int | float | None): Duration of the video in seconds. If not provided, it will be inherited from the parent class (ImageClip) or set to the default value.
  • +
+
+
Attributes:
+
+
    +
  • image (PIL Image): The PIL Image representation of the provided data.
  • +
  • size (tuple): The size (width, height) of the image.
  • +
+
+
Methods:
+
+
+

_import_image(self, image) -> Image.Image:

+
+
+
+

Private method to convert the provided data (numpy array or PIL Image) into a PIL Image.

+
+
Parameters:
+
image (np.ndarray or PIL Image): The raw data to be converted.
+
Returns:
+
Image.Image: The PIL Image representation of the provided data.
+
Raises:
+
TypeError: If the input type is not supported (neither numpy array nor PIL Image).
+
+
+
+
+
Example Usage:
+
+
# Import necessary libraries
+
+# Create a Data2ImageClip instance from a numpy array
+data_array = np.random.randint(0, 255, size=(480, 640, 3), dtype=np.uint8)
+video_clip = Data2ImageClip(data=data_array, fps=30, duration=5)
+
+# Create a Data2ImageClip instance from a PIL Image
+from PIL import Image
+data_image = Image.new('RGB', (640, 480), color='red')
+video_clip = Data2ImageClip(data=data_image, fps=24, duration=10)
+
+
+
Note:
+
+

The Data2ImageClip class extends the ImageClip. It allows users to create video clips from raw data, supporting either numpy arrays or PIL Images as input.

+
+
+
+
+ + +

ColorClip

+
+

class vidiopy.ColorClip(color: str | tuple[int, ...], mode="RGBA", size=(1, 1), fps=None, duration=None)

+
+
+
+

Bases: vidiopy.Data2ImageClip

+

A video clip class with a solid color.

+

It extends the Data2ImageClip class and allows users to create video clips with a solid color.

+
+
Parameters:
+
+
    +
  • +

    color: str | tuple[int, ...]: Color of the image. It can be a color name (e.g., 'red', 'blue') or RGB tuple.

    +
    +Available Color Names +
      +
    • aliceblue: "#f0f8ff",
    • +
    • antiquewhite: "#faebd7",
    • +
    • aqua: "#00ffff",
    • +
    • aquamarine: "#7fffd4",
    • +
    • azure: "#f0ffff",
    • +
    • beige: "#f5f5dc",
    • +
    • bisque: "#ffe4c4",
    • +
    • black: "#000000",
    • +
    • blanchedalmond: "#ffebcd",
    • +
    • blue: "#0000ff",
    • +
    • blueviolet: "#8a2be2",
    • +
    • brown: "#a52a2a",
    • +
    • burlywood: "#deb887",
    • +
    • cadetblue: "#5f9ea0",
    • +
    • chartreuse: "#7fff00",
    • +
    • chocolate: "#d2691e",
    • +
    • coral: "#ff7f50",
    • +
    • cornflowerblue: "#6495ed",
    • +
    • cornsilk: "#fff8dc",
    • +
    • crimson: "#dc143c",
    • +
    • cyan: "#00ffff",
    • +
    • darkblue: "#00008b",
    • +
    • darkcyan: "#008b8b",
    • +
    • darkgoldenrod: "#b8860b",
    • +
    • darkgray: "#a9a9a9",
    • +
    • darkgrey: "#a9a9a9",
    • +
    • darkgreen: "#006400",
    • +
    • darkkhaki: "#bdb76b",
    • +
    • darkmagenta: "#8b008b",
    • +
    • darkolivegreen: "#556b2f",
    • +
    • darkorange: "#ff8c00",
    • +
    • darkorchid: "#9932cc",
    • +
    • darkred: "#8b0000",
    • +
    • darksalmon: "#e9967a",
    • +
    • darkseagreen: "#8fbc8f",
    • +
    • darkslateblue: "#483d8b",
    • +
    • darkslategray: "#2f4f4f",
    • +
    • darkslategrey: "#2f4f4f",
    • +
    • darkturquoise: "#00ced1",
    • +
    • darkviolet: "#9400d3",
    • +
    • deeppink: "#ff1493",
    • +
    • deepskyblue: "#00bfff",
    • +
    • dimgray: "#696969",
    • +
    • dimgrey: "#696969",
    • +
    • dodgerblue: "#1e90ff",
    • +
    • firebrick: "#b22222",
    • +
    • floralwhite: "#fffaf0",
    • +
    • forestgreen: "#228b22",
    • +
    • fuchsia: "#ff00ff",
    • +
    • gainsboro: "#dcdcdc",
    • +
    • ghostwhite: "#f8f8ff",
    • +
    • gold: "#ffd700",
    • +
    • goldenrod: "#daa520",
    • +
    • gray: "#808080",
    • +
    • grey: "#808080",
    • +
    • green: "#008000",
    • +
    • greenyellow: "#adff2f",
    • +
    • honeydew: "#f0fff0",
    • +
    • hotpink: "#ff69b4",
    • +
    • indianred: "#cd5c5c",
    • +
    • indigo: "#4b0082",
    • +
    • ivory: "#fffff0",
    • +
    • khaki: "#f0e68c",
    • +
    • lavender: "#e6e6fa",
    • +
    • lavenderblush: "#fff0f5",
    • +
    • lawngreen: "#7cfc00",
    • +
    • lemonchiffon: "#fffacd",
    • +
    • lightblue: "#add8e6",
    • +
    • lightcoral: "#f08080",
    • +
    • lightcyan: "#e0ffff",
    • +
    • lightgoldenrodyellow: "#fafad2",
    • +
    • lightgreen: "#90ee90",
    • +
    • lightgray: "#d3d3d3",
    • +
    • lightgrey: "#d3d3d3",
    • +
    • lightpink: "#ffb6c1",
    • +
    • lightsalmon: "#ffa07a",
    • +
    • lightseagreen: "#20b2aa",
    • +
    • lightskyblue: "#87cefa",
    • +
    • lightslategray: "#778899",
    • +
    • lightslategrey: "#778899",
    • +
    • lightsteelblue: "#b0c4de",
    • +
    • lightyellow: "#ffffe0",
    • +
    • lime: "#00ff00",
    • +
    • limegreen: "#32cd32",
    • +
    • linen: "#faf0e6",
    • +
    • magenta: "#ff00ff",
    • +
    • maroon: "#800000",
    • +
    • mediumaquamarine: "#66cdaa",
    • +
    • mediumblue: "#0000cd",
    • +
    • mediumorchid: "#ba55d3",
    • +
    • mediumpurple: "#9370db",
    • +
    • mediumseagreen: "#3cb371",
    • +
    • mediumslateblue: "#7b68ee",
    • +
    • mediumspringgreen: "#00fa9a",
    • +
    • mediumturquoise: "#48d1cc",
    • +
    • mediumvioletred: "#c71585",
    • +
    • midnightblue: "#191970",
    • +
    • mintcream: "#f5fffa",
    • +
    • mistyrose: "#ffe4e1",
    • +
    • moccasin: "#ffe4b5",
    • +
    • navajowhite: "#ffdead",
    • +
    • navy: "#000080",
    • +
    • oldlace: "#fdf5e6",
    • +
    • olive: "#808000",
    • +
    • olivedrab: "#6b8e23",
    • +
    • orange: "#ffa500",
    • +
    • orangered: "#ff4500",
    • +
    • orchid: "#da70d6",
    • +
    • palegoldenrod: "#eee8aa",
    • +
    • palegreen: "#98fb98",
    • +
    • paleturquoise: "#afeeee",
    • +
    • palevioletred: "#db7093",
    • +
    • papayawhip: "#ffefd5",
    • +
    • peachpuff: "#ffdab9",
    • +
    • peru: "#cd853f",
    • +
    • pink: "#ffc0cb",
    • +
    • plum: "#dda0dd",
    • +
    • powderblue: "#b0e0e6",
    • +
    • purple: "#800080",
    • +
    • rebeccapurple: "#663399",
    • +
    • red: "#ff0000",
    • +
    • rosybrown: "#bc8f8f",
    • +
    • royalblue: "#4169e1",
    • +
    • saddlebrown: "#8b4513",
    • +
    • salmon: "#fa8072",
    • +
    • sandybrown: "#f4a460",
    • +
    • seagreen: "#2e8b57",
    • +
    • seashell: "#fff5ee",
    • +
    • sienna: "#a0522d",
    • +
    • silver: "#c0c0c0",
    • +
    • skyblue: "#87ceeb",
    • +
    • slateblue: "#6a5acd",
    • +
    • slategray: "#708090",
    • +
    • slategrey: "#708090",
    • +
    • snow: "#fffafa",
    • +
    • springgreen: "#00ff7f",
    • +
    • steelblue: "#4682b4",
    • +
    • tan: "#d2b48c",
    • +
    • teal: "#008080",
    • +
    • thistle: "#d8bfd8",
    • +
    • tomato: "#ff6347",
    • +
    • turquoise: "#40e0d0",
    • +
    • violet: "#ee82ee",
    • +
    • wheat: "#f5deb3",
    • +
    • white: "#ffffff",
    • +
    • whitesmoke: "#f5f5f5",
    • +
    • yellow: "#ffff00",
    • +
    • yellowgreen: "#9acd32",
    • +
    +
    +
  • +
  • +

    mode: str: Mode to use for the image. Default is 'RGBA'.

    +
  • +
  • size: tuple: Size of the image in pixels (width, height). Default is (1, 1), allowing the size to be changed afterwards.
  • +
  • fps: float, optional: Frames per second for the video clip.
  • +
  • duration: float, optional: Duration of the video clip in seconds.
  • +
+
+
Attributes:
+
+
    +
  • color: str | tuple[int, ...]: The color of the video clip.
  • +
  • mode: str: The mode of the video clip.
  • +
  • Other attributes are inherited from the Data2ImageClip class.
  • +
+
+
Methods:
+
+
+

set_size(self, size: tuple[int, int]):

+
+
+
+

Set the size of the video clip.

+
+
Parameters:
+
size: tuple[int, int]: New size of the video clip in pixels (width, height).
+
Example Usage:
+
color_clip.set_size((800, 600))
+
+
+
+
+
+
Example Usage:
+
+
# Create a red square video clip (500x500, 30 FPS, 5 seconds):
+red_square = ColorClip(color='red', size=(500, 500), fps=30, duration=5)
+
+# Create a blue fullscreen video clip (1920x1080, default FPS and duration):
+blue_fullscreen = ColorClip(color='blue', size=(1920, 1080))
+
+# Create a green transparent video clip (RGBA mode, 800x600):
+green_transparent = ColorClip(color=(0, 255, 0, 0), mode='RGBA', size=(800, 600))
+
+
+
+
+
+

TextClip

+
+
class vidiopy.TextClip(text: str, font_pth: None | str = None, font_size: int = 20, txt_color: str | tuple[int, ...] = (255, 255, 255, 0), bg_color: str | tuple[int, ...] = (0, 0, 0, 0), fps=None, duration=None)
+
+

Bases: #!py vidiopy.Data2ImageClip

+

A class representing a text clip to be used in video compositions.

+
+
Parameters:
+
+
    +
  • text (str): The text content to be displayed in the clip.
  • +
  • font_pth (None | str, optional): The file path to the TrueType font file (.ttf). If None, the default system font is used. Defaults to None.
  • +
  • font_size (int, optional): The font size for the text. Defaults to 20.
  • +
  • txt_color (str | tuple[int, ...], optional): The color of the text specified as either a string (e.g., 'white') or a tuple representing RGBA values. Defaults to (255, 255, 255, 0) (fully transparent white).
  • +
  • bg_color (str | tuple[int, ...], optional): The background color of the text clip, specified as either a string (e.g., 'black') or a tuple representing RGBA values. Defaults to (0, 0, 0, 0) (fully transparent black).
  • +
  • fps (float, optional): Frames per second of the video. If None, the value is inherited from the parent class. Defaults to None.
  • +
  • duration (float, optional): Duration of the video clip in seconds. If None, the value is inherited from the parent class. Defaults to None.
  • +
+
+
Attributes:
+
+
    +
  • font (PIL.ImageFont.FreeTypeFont): The font object used for rendering the text.
  • +
  • image (PIL.Image.Image): The image containing the rendered text.
  • +
  • fps (float): Frames per second of the video clip.
  • +
  • duration (float): Duration of the video clip in seconds.
  • +
  • Other attributes are inherited from the Data2ImageClip class.
  • +
+
+
Example Usage:
+
+
# Create a TextClip with custom text and styling
+text_clip = TextClip("Contribute to Vidiopy", font_size=30, txt_color='red', bg_color='blue', fps=24, duration=5.0)
+
+# Use the text clip in a video composition
+composition = CompositeVideoClip([other_clip, text_clip])
+composition.write_videofile("output.mp4", codec='libx264', fps=24)
+
+
+
+
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/reference_manual/clips/video_clips/imagesequenceclip/index.html b/reference_manual/clips/video_clips/imagesequenceclip/index.html new file mode 100644 index 0000000..3ce496a --- /dev/null +++ b/reference_manual/clips/video_clips/imagesequenceclip/index.html @@ -0,0 +1,1044 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ImageSequenceClip - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

ImageSequenceClip

+
+

class vidiopy.VideoClip.ImageSequenceClip

+
+
+
+

Bases: vidiopy.VideoClip.VideoClip(sequence, fps=None, duration=None, audio=None)

+

A class used to represent a sequence of images as a video clip. This class extends the VideoClip class and provides additional functionality for handling sequences of images.

+
+
Attributes:
+
+
    +
  • +

    clip (tuple[Image.Image, ...]): The sequence of images as a tuple of PIL Images.

    +
  • +
  • +

    It inherits all the attributes from the VideoClip class.

    +
  • +
+
+
Parameters:
+
+
    +
  • sequence (str | Path | tuple[Image.Image, ...] | tuple[np.ndarray, ...] | tuple[str | Path, ...]): The sequence to import. It can be a tuple of PIL Images, paths to images, numpy arrays, or a path to a directory.
  • +
  • fps (int | float | None, optional): The frames per second of the image sequence clip. If not specified, it is calculated from the duration and the number of images in the sequence.
  • +
  • duration (int | float | None, optional): The duration of the image sequence clip in seconds. If not specified, it is calculated from the fps and the number of images in the sequence.
  • +
  • audio (optional): The audio of the image sequence clip. If not specified, the image sequence clip will have no audio.
  • +
+
+
Methods:
+
+
+

make_frame_array(t)

+
+
+
+

Generates a numpy array representation of a specific frame in the image sequence clip.

+

This method calculates the index of the frame for a specific time, retrieves the frame from the image sequence clip, and converts it to a numpy array.

+

Parameters: +- t (int | float): The time of the frame to convert.

+

Returns: +- np.ndarray: The numpy array representation of the frame.

+

Requires: +- duration or end to be set.

+
+
+
+

make_frame_pil(t)

+
+
+
+

Generates a PIL Image representation of a specific frame in the image sequence clip.

+

This method calculates the index of the frame for a specific time, retrieves the frame from the image sequence clip, and returns it as a PIL Image.

+

Parameters: +- t (int | float): The time of the frame to convert.

+

Returns: +- Image.Image: The PIL Image representation of the frame.

+

Raises: +- ValueError: If neither the duration nor the end of the image sequence clip is set.

+

Requires: +- duration or end to be set.

+
+
+
+

fl_frame_transform(func, *args, **kwargs)

+
+
+
+

Applies a function to each frame of the image sequence clip.

+

This method iterates over each frame in the image sequence clip, applies a function to it, and replaces the original frame with the result. The function is expected to take a PIL Image as its first argument and return a PIL Image.

+

Parameters: +- func (Callable[..., Image.Image]): The function to apply to each frame. It should take a PIL Image as its first argument and return a PIL Image. +- *args: Additional positional arguments to pass to the function. +- **kwargs: Additional keyword arguments to pass to the function.

+

Returns: +- ImageSequenceClip: The current instance of the ImageSequenceClip class.

+

Example: +

>>> image_sequence_clip = ImageSequenceClip()
+>>> image_sequence_clip.fl_frame_transform(lambda frame: frame.rotate(90))
+

+
+
+
+

fl_clip_transform(func, *args, **kwargs)

+
+
+
+

Applies a function to each frame of the image sequence clip along with its timestamp.

+

This method iterates over each frame in the image sequence clip, applies a function to it and its timestamp, and replaces the original frame with the result. The function is expected to take a PIL Image and a float as its first two arguments and return a PIL Image.

+

Parameters: +- func (Callable[..., Image.Image]): The function to apply to each frame. It should take a PIL Image and a float as its first two arguments and return a PIL Image. +- *args: Additional positional arguments to pass to the function. +- **kwargs: Additional keyword arguments to pass to the function.

+

Returns: +- ImageSequenceClip: The current instance of the ImageSequenceClip class.

+

Raises: +- ValueError: If the fps of the image sequence clip is not set.

+

Requires: +- fps to be set.

+

Example: +

>>> image_sequence_clip = ImageSequenceClip()
+>>> image_sequence_clip.fl_clip_transform(lambda frame, t: frame.rotate(90 * t))
+

+
+
+
+
+
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/reference_manual/clips/video_clips/mixing_clips/index.html b/reference_manual/clips/video_clips/mixing_clips/index.html new file mode 100644 index 0000000..b79a7af --- /dev/null +++ b/reference_manual/clips/video_clips/mixing_clips/index.html @@ -0,0 +1,1014 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Mixing VideoClip - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

CompositeVideoCLip

+
+

def composite_videoclips(clips: Sequence[VideoClip], fps: int | float | None = None, bg_color: tuple[int, ...] = (0, 0, 0, 0), use_bg_clip: bool = False, audio: bool = True, audio_fps=44100)

+
+
+
+

Composites multiple video clips into a single video clip.

+

This function takes a sequence of video clips and composites them into a single video clip. The clips are layered on top of each other in the order they appear in the sequence. The background of the composite clip can be a solid color or the first clip in the sequence. The function also handles the positioning of each clip in the composite clip and the audio of the composite clip.

+
+
Args:
+
+
    +
  • clips: Sequence[VideoClip]: The sequence of video clips to composite.
  • +
  • fps (int | float | None, optional): The frames per second of the composite clip. If not specified, it is set to the maximum fps of the clips in the sequence or raises a ValueError if none of the clips have fps set.
  • +
  • bg_color (tuple[int, ...], optional): The background color of the composite clip as a tuple of integers representing RGBA values. Default is (0, 0, 0, 0) which is transparent.
  • +
  • use_bg_clip (bool, optional): Whether to use the first clip in the sequence as the background of the composite clip. Default is False.
  • +
  • audio (bool, optional): Whether to include audio in the composite clip. If True, the audio of the clips in the sequence is also composited. Default is True.
  • +
  • audio_fps (int, optional): The frames per second of the audio of the composite clip. Default is 44100.
  • +
+
+
Returns:
+
ImageSequenceClip: The composite video clip as an instance of the ImageSequenceClip class.
+
Raises:
+
+
    +
  • ValueError: If neither fps nor duration is set for any of the clips in the sequence.
  • +
  • ValueError: If the position of a clip in the composite clip is not specified correctly.
  • +
  • TypeError: If the position of a clip in the composite clip is not of the correct type.
  • +
+
+
Example:
+
>>> clip1 = VideoClip(...)
+>>> clip2 = VideoClip(...)
+>>> composite_clip = composite_videoclips([clip1, clip2], fps=24)
+
+
Note:
+
This function uses the ImageSequenceClip class to create the composite video clip and the composite_audioclips function to composite the audio of the clips.
+
+
+
+

ConcatenateVideoClips

+
+

def concatenate_videoclips(clips: Sequence[VideoClip], transparent: bool = False, fps: int | float | None = None, scaling_strategy: str = "scale_same", transition: ( VideoClip | Callable[[Image.Image, Image.Image, int | float], VideoClip] | None ) = None, audio: bool = True, audio_fps: int | None = None):

+
+
+
+

Concatenates multiple video clips into a single video clip.

+

This function takes a sequence of video clips and concatenates them into a single video clip. The clips are appended one after the other in the order they appear in the sequence. The function also handles the scaling of each clip in the concatenated clip and the audio of the concatenated clip.

+
+
Args:
+
+
    +
  • clips (Sequence[VideoClip]): The sequence of video clips to concatenate.
  • +
  • transparent (bool, optional): Whether to use a transparent background for the concatenated clip. Default is False.
  • +
  • fps (int | float | None, optional): The frames per second of the concatenated clip. If not specified, it is set to the maximum fps of the clips in the sequence or raises a ValueError if none of the clips have fps set.
  • +
  • scaling_strategy (str, optional): The scaling strategy to use for the clips in the concatenated clip. If 'scale_up', the clips are scaled up to fit the size of the concatenated clip. If 'scale_down', the clips are scaled down to fit the size of the concatenated clip. If 'scale_same', the clips are not scaled. Default is 'scale_same'.
  • +
  • transition (VideoClip | Callable[[Image.Image, Image.Image, int | float], VideoClip] | None, optional): The transition to use between the clips in the concatenated clip. If a VideoClip, it is used as the transition. If a callable, it is called with the last frame of the previous clip, the first frame of the next clip, and the duration of the transition to generate the transition. If None, no transition is used. Default is None.
  • +
  • audio (bool, optional): Whether to include audio in the concatenated clip. If True, the audio of the clips in the sequence is also concatenated. Default is True.
  • +
  • audio_fps (int | None, optional): The frames per second of the audio of the concatenated clip. Default is None.
  • +
+
+
Returns:
+
ImageSequenceClip: The concatenated video clip as an instance of the ImageSequenceClip class.
+
+

Raises: + - ValueError: If neither fps nor duration is set for any of the clips in the sequence. + - ValueError: If the size of a clip in the concatenated clip is not specified correctly. + - TypeError: If the scaling strategy of a clip in the concatenated clip is not of the correct type.

+
+
Example:
+
>>> clip1 = VideoClip(...)
+>>> clip2 = ImageClip(...)
+>>> concatenated_clip = concatenate_videoclips([clip1, clip2], fps=24)
+
+
Note:
+
This function uses the ImageSequenceClip class to create the concatenated video clip and the concatenate_audioclips function to concatenate the audio of the clips.
+
+
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/reference_manual/clips/video_clips/videoclip/index.html b/reference_manual/clips/video_clips/videoclip/index.html new file mode 100644 index 0000000..814c155 --- /dev/null +++ b/reference_manual/clips/video_clips/videoclip/index.html @@ -0,0 +1,1642 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + VideoClip - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

VideoClip

+
+

class vidiopy.VideoClip.VideoClip

+
+
+
+

Base: vidiopy.Clip.Clip

+

A VideoClip is a Base Class for all Video And Image clips (VideoFileClip, ImageClip and ImageSequenceClip)

+

See VideoFileClip, ImageClip etc. for more user-friendly classes.

+
+
Attributes:
+
+
+

_st: float | int

+
+
+
The start time of the clip (in seconds).
+
+
+

_ed: float | int | None

+
+
+
The end time of the clip (in seconds).
+
+
+

_dur: float | int | None

+
+
+
+

The Duration of the clip (in seconds).

+
+Warning: Not Real Duration +

It may not be equal to video.end - video.start. +It is the original duration with which the video was imported, or otherwise set.

+
+
+
+
+

fps: float | int | None

+
+
+
The FPS(Frame per Second) of the Video.
+
+
+

size: tuple[int, int]

+
+
+
The size of the clip, (width,height), in pixels.
+
+
+

audio: AudioClip | None

+
+
+
Audio in the Video.
+
+
+

pos: Callable[[float | int], tuple[int | str | float, int | str | float]]

+
+
+
A function t->(x,y) where x,y is the position of the clip when it is composed with other clips. See VideoClip.set_pos for more details.
+
+
+

relative_pos: bool

+
+
+
A bool which determines whether pos outputs a relative position or a position in pixels.
+
+
+
Properties:
+
+
+

start: float | int

+
+
+
The start time of the clip (in seconds).
+
+
+

end: float | int | None

+
+
+
The end time of the clip (in seconds).
+
+
+

duration: float | int | None

+
+
+
+

The Duration of the clip (in seconds).

+
+Warning: Not Real Duration +

It may not be equal to video.end - video.start. +It is the original duration with which the video was imported, or otherwise set.

+
+
+
+
+

width | w: int

+
+
+
The width of the clip, in pixels.
+
+
+

height | h: int

+
+
+
The height of the clip, in pixels.
+
+
+

aspect_ratio: Fraction

+
+
+
The aspect ratio of the clip, (width / height).
+
+
+
methods:
+
+
+

set_start(self, value: int | float) -> VideoClip

+
+
+
+

The set_start method is used to set the start time of the video clip. +It changes the _st attribute of the VideoClip.

+
+
Args:
+
value: int | float: The start time of the video clip.
+
Returns:
+
VideoClip: The instance of the VideoClip after setting the start time.
+
+
+
+
+

set_end(self, value: int | float) -> VideoClip

+
+
+
+

The set_end method is used to set the end time of the video clip. +It changes the _ed attribute of the VideoClip.

+
+
Args:
+
value: int | float: The end time of the video clip.
+
Returns:
+
VideoClip: The instance of the VideoClip after setting the end time.
+
+
+
+
+

set_duration(self, value: int | float) -> VideoClip

+
+
+
+

Setter for the duration of the video clip. +It raises a ValueError, since the duration is not allowed to be set directly, +but you can change the duration using clip._dur = value or the _set_duration method.

+
+
Args:
+
dur: int | float: The duration to set for the video clip.
+
Returns:
+
NoReturn: Raises a ValueError since duration is not allowed to be set.
+
Raises:
+
ValueError: If an attempt is made to set the duration, a ValueError is raised.
+
+
+
+
+

_set_duration(self, value: int | float) -> VideoClip

+
+
+
+

Private method to set the duration of the video clip. +It changes the _dur attribute of the VideoClip.

+
+
Args:
+
value: int | float: The duration to set for the video clip.
+
Returns:
+
VideoClip: The instance of the VideoClip after setting the duration.
+
+
+
+
+

set_position(self, pos: (tuple[int | float | str, int | float | str] | list[int | float | str] | Callable[[float | int], tuple[int | float | str, int | float | str]]), relative=False) -> Self:

+
+
+
+

Sets the position of the video clip. +This is useful for the concatenate method, where the position of the video clip is used to place it on another clip. +This method allows the position of the video clip to be set either as a fixed tuple of coordinates, or as a function that returns a tuple of coordinates at each time. The position can be set as absolute or relative to the size of the clip using the relative parameter.

+
+
Note:
+
+
    +
  • It Should Be the coordinates of the Video on the top left corner.
  • +
  • If relative is True, the position should be between the 0.0 & 1.0.
  • +
  • If relative is False, the position should be between the 0 & width or height of the video.
  • +
+
+
Parameters:
+
+
+
pos: tuple | Callable: The position to set for the video clip. This can be either:
+
+
    +
  • a tuple of two integers or floats, representing the x and y coordinates of the position, or
  • +
  • a callable that takes a single float or integer argument (representing the time) and returns a tuple of two integers or floats, representing the x and y coordinates of the position.
  • +
+
+
+relative (bool, optional): Whether the position is relative to the size of the clip. If True, the position is interpreted as a fraction of the clip's width and height. Defaults to False.
+
Raises:
+
TypeError: If pos is not a tuple or a callable.
+
Returns:
+
self: Returns the instance of the class.
+
+
+
+
+

set_audio(self, audio: AudioClip | None) -> Self:

+
+
+
+

Sets the audio for the video clip.

+

This method assigns the provided audio clip to the video clip. If the audio clip is not None, +it also sets the start and end times of the audio clip to match the video clip's start and end times.

+
+
Parameters:
+
audio: AudioClip | None: The audio clip to be set to the video clip. If None, no audio is set.
+
Returns:
+
Self: Returns the instance of the class with updated audio clip.
+
+
+
+
+

without_audio(self) -> Self:

+
+
+
+

Removes the audio from the current VideoClip instance.

+

This method sets the 'audio' attribute of the VideoClip instance to None, effectively removing any audio that the clip might have.

+
+
Returns:
+
VideoClip: The same instance of the VideoClip but without any audio. This allows for method chaining.
+
Example:
+
>>> clip = VideoClip(...)
+>>> clip_without_audio = clip.without_audio()
+
+
Note:
+
This method modifies the VideoClip instance in-place. If you want to keep the original clip with audio, consider making a copy before calling this method.
+
+
+
+
+

set_fps(self, fps: int | float) -> Self:

+
+
+
+

Set the frames per second (fps) for the video clip.

+

This method allows you to set the fps for the video clip. The fps value +determines how many frames are shown per second during playback. A higher +fps value results in smoother video playback.

+
+
Parameters:
+
fps: int | float: The frames per second value to set. This can be an integer +or a float. For example, a value of 24 would mean 24 frames are shown per second.
+
Raises:
+
TypeError: If the provided fps value is not an integer or a float.
+
Returns:
+
Self: Returns the instance of the class, allowing for method chaining.
+
Example:
+
>>> clip = VideoClip()
+>>> clip.set_fps(24)
+
+
+
+
+
+

make_frame_array(self, t) -> np.ndarray:

+
+
+
+

Generate a frame at time t as a NumPy array.

+

This method is intended to be overridden in subclasses. It should return +a NumPy array representing the frame at the given time.

+
+
Parameters:
+
t: float: The time at which to generate the frame.
+
Raises:
+
NotImplementedError: If the method is not overridden in a subclass.
+
Returns:
+
np.ndarray: A NumPy array representing the frame at time t.
+
Example:
+
>>> clip = VideoClipSubclass()
+>>> frame = clip.make_frame_array(0.5)
+
+
+
+
+
+

make_frame_pil(self, t) -> Image.Image:

+
+
+
+

Generate a frame at time t as a PIL Image.

+

This method is intended to be overridden in subclasses. It should return +a PIL Image representing the frame at the given time.

+
+
Parameters:
+
t: float: The time at which to generate the frame.
+
Raises:
+
NotImplementedError: If the method is not overridden in a subclass.
+
Returns:
+
Image.Image: A PIL Image representing the frame at time t.
+
Example:
+
>>> clip = VideoClipSubclass()
+>>> frame = clip.make_frame_pil(0.5)
+
+
+
+
+
+

get_frame(self, t: int | float, is_pil=None) -> np.ndarray | Image.Image:

+
+
+
+

Get a frame at time t.

+

This method returns a frame at the given time t. The frame can be returned +as a NumPy array or a PIL Image, depending on the value of is_pil.

+
+
Parameters:
+
t: int | float: The time at which to get the frame. +is_pil (bool, optional): If True, the frame is returned as a PIL Image. If False or None, the frame is returned as a NumPy array. Defaults to None.
+
Raises:
+
ValueError: If is_pil is not True, False, or None.
+
Returns:
+
np.ndarray | Image.Image: The frame at time t as a NumPy array or a PIL Image.
+
Example:
+
>>> clip = VideoClip()
+>>> frame_array = clip.get_frame(0.5)
+>>> frame_pil = clip.get_frame(0.5, is_pil=True)
+
+
+
+
+
+

iterate_frames_pil_t(self, fps: int | float) -> Generator[Image.Image, Any, None]:

+
+
+
+

Iterate over frames as PIL Images at a given frames per second (fps).

+

This method generates frames at a given fps as PIL Images. The frames are +generated from the start of the clip to the end or duration, whichever is set.

+
+
Parameters:
+
fps: int | float: The frames per second at which to generate frames.
+
Raises:
+
ValueError: If neither end nor duration is set.
+
Yields:
+
Image.Image: The next frame as a PIL Image.
+
Example:
+
>>> clip = VideoClip()
+>>> for frame in clip.iterate_frames_pil_t(24):
+...     # Do something with frame
+
+
+
+
+
+

iterate_frames_array_t(self, fps: int | float) -> Generator[np.ndarray, Any, None]:

+
+
+
+

Iterate over frames as NumPy arrays at a given frames per second (fps).

+

This method generates frames at a given fps as NumPy arrays. The frames are +generated from the start of the clip to the end or duration, whichever is set.

+
+
Parameters:
+
fps: int | float: The frames per second at which to generate frames.
+
Raises:
+
ValueError: If neither end nor duration is set.
+
Yields:
+
np.ndarray: The next frame as a NumPy array.
+
Example:
+
>>> clip = VideoClip()
+>>> for frame in clip.iterate_frames_array_t(24):
+...     # Do something with frame
+
+
+
+
+
+

sub_clip_copy(self, t_start: int | float | None = None, t_end: int | float | None = None) -> Self:

+
+
+
+

Returns a subclip of the clip.copy, starting at time t_start (in seconds).

+
+
Parameters:
+
t_start: int | float | None, optional: The start time of the subclip in seconds. Defaults to None.
+
t_end: int | float | None, optional: The end time of the subclip in seconds. Defaults to None.
+
Returns:
+
Self: The subclip of the clip.
+
Raises:
+
NotImplementedError: If the method is not overridden in a subclass.
+
Example:
+
>>> clip = VideoClip()
+>>> subclip = clip.sub_clip_copy(t_start=1.5, t_end=3.5)
+
+
+
+
+
+

sub_clip(self, t_start: int | float | None = None, t_end: int | float | None = None) -> Self:

+
+
+
+

Returns a subclip of the clip, starting at time t_start and ending at time t_end.

+
+
Parameters:
+
t_start: int | float | None, optional: The start time of the subclip in seconds. Defaults to None.
+
t_end: int | float | None, optional: The end time of the subclip in seconds. Defaults to None.
+
Returns:
+
Self: The subclip of the clip.
+
Raises:
+
NotImplementedError: If the method is not overridden in a subclass.
+
Example:
+
>>> clip = VideoClip()
+>>> subclip = clip.sub_clip(t_start=1.5, t_end=3.5)
+
+
+
+
+
+

fl_frame_transform(self, func, *args, **kwargs) -> Self:

+
+
+
+

Apply a frame transformation function to each frame of the video clip.

+

This method calls the provided function func on each frame of the clip and applies the transformation. +The transformed frames are then stored in a list and assigned back to the clip.

+
+
Parameters:
+
func: The frame transformation function to be applied.
+
*args: Additional positional arguments to be passed to the transformation function.
+
**kwargs: Additional keyword arguments to be passed to the transformation function.
+
Returns:
+
Self: The modified video clip object.
+
Example:
+
>>> def grayscale(frame):
+>>>     # Convert frame to grayscale
+>>>     return cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+>>>
+>>> clip = VideoClip()
+>>> clip.fl_frame_transform(grayscale)
+
+
Note:
+
This method is meant to be overridden in the subclass. If not overridden, it raises a NotImplementedError.
+
The transformation function func should accept a single frame as the first argument and return the transformed frame.
+
+
+
+
+

fl_time_transform(self, func_t: Callable[[int | float], int | float]) -> Self:

+
+
+
+

Apply a time transformation function to the clip.

+

This method modifies the make_frame_array and make_frame_pil methods +to apply a time transformation function func_t to the time t before +generating the frame. This can be used to speed up, slow down, or reverse +the clip, among other things.

+

If the clip has audio, the same time transformation is applied to the audio.

+
+
Parameters:
+
func_t (Callable[[int | float], int | float]): The time transformation function to apply. This function should take a time t and return a new time.
+
Returns:
+
Self: Returns the instance of the class, allowing for method chaining.
+
Example:
+
>>> clip = VideoClip()
+>>> clip.fl_time_transform(lambda t: 2*t)  # Speed up the clip by a factor of 2
+
+
+
+
+
+

fx(self, func: Callable[..., Self], *args, **kwargs) -> Self:

+
+
+
+

Apply an effect function to the clip.

+

This method applies an effect function func to the clip. The effect function +should take the clip as its first argument, followed by any number of positional +and keyword arguments.

+

The effect function should return a new clip, which is then returned by this method.

+
+
Parameters:
+
func (Callable[..., Self]): The effect function to apply. This function should take the clip as its first argument, followed by any number of positional and keyword arguments.
+
*args: Positional arguments to pass to the effect function.
+
**kwargs: Keyword arguments to pass to the effect function.
+
Returns:
+
Self: The new clip returned by the effect function.
+
Example:
+
>>> clip = VideoClip()
+>>> clip.fx(effect_function, arg1, arg2, kwarg1=value1)
+
+
+
+
+
+

sub_fx(self, func: Callable[..., Self], *args, start_t: int | float | None = None, end_t: int | float | None = None, **kwargs) -> Self:

+
+
+
+

Apply an effect function to a subclip of the clip.

+

This method creates a subclip from start_t to end_t, applies an effect +function func to the subclip, and returns the modified subclip.

+

The effect function should take the clip as its first argument, followed by +any number of positional and keyword arguments.

+
+
Parameters:
+
func (Callable[..., Self]): The effect function to apply. This function should take the clip as its first argument, followed by any number of positional and keyword arguments.
+
*args: Positional arguments to pass to the effect function.
+
start_t (int | float | None, optional): The start time of the subclip. If None, the start of the clip is used. Defaults to None.
+
end_t (int | float | None, optional): The end time of the subclip. If None, the end of the clip is used. Defaults to None.
+
**kwargs: Keyword arguments to pass to the effect function.
+
Returns:
+
Self: The modified subclip.
+
Example:
+
>>> clip = VideoClip()
+>>> subclip = clip.sub_fx(effect_function, arg1, arg2, start_t=1, end_t=2, kwarg1=value1)
+
+
+
+
+
+

_sync_audio_video_s_e_d(self) -> Self:

+
+
+
+

Synchronizes the audio and video start, end, and duration attributes.

+

This method is used to ensure that the audio and video parts of a clip are in sync. +It sets the start, end, and original duration of the audio to match the video.

+
+
Returns:
+
Self: Returns the instance of the class with updated audio attributes.
+
Raises:
+
None
+
Example:
+
>>> video_clip = VideoClip()
+>>> video_clip._sync_audio_video_s_e_d()
+
+
Note:
+
This is an internal method, typically not meant to be used directly by the user.
+
+
+
+
+

write_videofile(self, filename, fps=None, codec=None, bitrate=None, audio=True, audio_fps=44100, preset="medium", pixel_format=None, audio_codec=None, audio_bitrate=None, threads=None, ffmpeg_params: dict[str, str] | None = None, logger="bar", over_write_output=True) -> Self:

+
+
+
+

Writes the video clip to a file.

+

This method generates video frames, processes them, and writes them to a file. +If audio is present in the clip, it is also written to the file.

+
+
Args:
+
filename (str): The name of the file to write.
+
fps (int, optional): The frames per second to use for the output video. If not provided, the fps of the video clip is used.
+
codec (str, optional): The codec to use for the output video.
+
bitrate (str, optional): The bitrate to use for the output video.
+
audio (bool, optional): Whether to include audio in the output video. Defaults to True.
+
audio_fps (int, optional): The frames per second to use for the audio. Defaults to 44100.
+
preset (str, optional): The preset to use for the output video. Defaults to "medium".
+
pixel_format (str, optional): The pixel format to use for the output video.
+
audio_codec (str, optional): The codec to use for the audio.
+
audio_bitrate (str, optional): The bitrate to use for the audio.
+
threads (int, optional): The number of threads to use for writing the video file.
+
ffmpeg_params (dict[str, str] | None, optional): Additional parameters to pass to ffmpeg.
+
logger (str, optional): The logger to use. Defaults to "bar".
+
over_write_output (bool, optional): Whether to overwrite the output file if it already exists. Defaults to True.
+
Returns:
+
Self: Returns the instance of the class.
+
Raises:
+
Exception: If fps is not provided and not set in the video clip.
+
Example:
+
>>> video_clip = VideoClip()
+>>> video_clip.write_videofile("output.mp4")
+
+
Note:
+
This method uses ffmpeg to write the video file.
+
+
+
+
+

write_videofile_subclip(self, filename, start_t: int | float | None = None, end_t: int | float | None = None, fps=None, codec=None, bitrate=None, audio=True, audio_fps=44100, preset="medium", pixel_format=None, audio_codec=None, audio_bitrate=None, write_logfile=False, verbose=True, threads=None, ffmpeg_params: dict[str, str] | None = None, logger="bar", over_write_output=True) -> Self:

+
+
+
+

Writes a subclip of the video clip to a file.

+

This method generates video frames for a specific part of the video (subclip), processes them, and writes them to a file. +If audio is present in the clip, it is also written to the file.

+
+
Args:
+
filename (str): The name of the file to write.
+
start_t (int | float | None, optional): The start time of the subclip. If not provided, the start of the video is used.
+
end_t (int | float | None, optional): The end time of the subclip. If not provided, the end of the video is used.
+
fps (int, optional): The frames per second to use for the output video. If not provided, the fps of the video clip is used.
+
codec (str, optional): The codec to use for the output video.
+
bitrate (str, optional): The bitrate to use for the output video.
+
audio (bool, optional): Whether to include audio in the output video. Defaults to True.
+
audio_fps (int, optional): The frames per second to use for the audio. Defaults to 44100.
+
preset (str, optional): The preset to use for the output video. Defaults to "medium".
+
pixel_format (str, optional): The pixel format to use for the output video.
+
audio_codec (str, optional): The codec to use for the audio.
+
audio_bitrate (str, optional): The bitrate to use for the audio.
+
write_logfile (bool, optional): Whether to write a logfile. Defaults to False.
+
verbose (bool, optional): Whether to print verbose output. Defaults to True.
+
threads (int, optional): The number of threads to use for writing the video file.
+
ffmpeg_params (dict[str, str] | None, optional): Additional parameters to pass to ffmpeg.
+
logger (str, optional): The logger to use. Defaults to "bar".
+
over_write_output (bool, optional): Whether to overwrite the output file if it already exists. Defaults to True.
+
Returns:
+
Self: Returns the instance of the class.
+
Raises:
+
Exception: If fps is not provided and not set in the video clip.
+
Example:
+
>>> video_clip = VideoClip()
+>>> video_clip.write_videofile_subclip("output.mp4", start_t=10, end_t=20)
+
+
Note:
+
This method uses ffmpeg to write the video file.
+
+
+
+
+

write_image_sequence(self, nformat: str, fps: int | float | None = None, dir=".") -> Self:

+
+
+
+

Writes the frames of the video clip as an image sequence.

+

This method generates video frames, processes them, and writes them as images to a directory. +The images are named by their frame number and the provided format.

+
+
Args:
+
nformat (str): The format to use for the output images.
+
fps (int | float | None, optional): The frames per second to use for the output images. If not provided, the fps of the video clip is used.
+
dir (str, optional): The directory to write the images to. Defaults to the current directory.
+
Returns:
+
Self: Returns the instance of the class.
+
Raises:
+
ValueError: If fps is not provided and fps and duration are not set in the video clip.
+
Example:
+
>>> video_clip = VideoClip()
+>>> video_clip.write_image_sequence("png", fps=24, dir="frames")
+
+
Note:
+
This method uses ffmpeg to write the images.
+
+
+
+
+

save_frame(self, t: int | float, filename: str) -> Self:

+
+
+
+

Saves a specific frame of the video clip as an image.

+

This method generates a video frame for a specific time, processes it, and writes it as an image to a file.

+
+
Args:
+
t (int | float): The time of the frame to save.
+
filename (str): The name of the file to write.
+
Returns:
+
Self: Returns the instance of the class.
+
Example:
+
>>> video_clip = VideoClip()
+>>> video_clip.save_frame(10, "frame10.png")
+
+
Note:
+
This method uses ffmpeg to write the image.
+
+
+
+
+

to_ImageClip(self, t: int | float):

+
+
+
+

Converts a specific frame of the video clip to an ImageClip.

+

This method generates a video frame for a specific time, processes it, and converts it to an ImageClip.

+
+
Args:
+
t (int | float): The time of the frame to convert.
+
Returns:
+
Data2ImageClip: The converted ImageClip.
+
Raises:
+
None
+
Example:
+
>>> video_clip = VideoClip()
+>>> image_clip = video_clip.to_ImageClip(10)
+
+
Note:
+
This method uses ffmpeg to generate the frame and then converts it to an ImageClip.
+
+
+
+
+
+
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/reference_manual/clips/video_clips/videofileclip/index.html b/reference_manual/clips/video_clips/videofileclip/index.html new file mode 100644 index 0000000..1777f3a --- /dev/null +++ b/reference_manual/clips/video_clips/videofileclip/index.html @@ -0,0 +1,1163 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + VideoFileClip - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

VideoFileClip

+
+

class vidiopy.VideoFileClip(filename: str, audio: bool = True, ffmpeg_options: dict | None = None)

+
+
+
+

Bases: vidiopy.VideoClip

+

All Methods and properties of the VideoClip class are available.

+

A video clip originating from a Video file.

+
+
Parameters:
+
+
+

filename: str

+
+
+
The name of the video file, as a string or a path-like object. It can have any extension supported by ffmpeg.
+All Supported extensions +
    +
  • .mp4
  • +
  • .avi
  • +
  • .mov
  • +
  • .mkv
  • +
  • .webm
  • +
  • .flv
  • +
  • .wmv
  • +
  • .3gp
  • +
  • .ogg
  • +
  • .ogv
  • +
  • .mts
  • +
  • .m2ts
  • +
  • .ts
  • +
  • .vob
  • +
  • .mpg
  • +
  • .mpeg
  • +
  • .m2v
  • +
  • .m4v
  • +
  • .mxf
  • +
  • .dv
  • +
  • .f4v
  • +
  • .gif
  • +
  • .mp3
  • +
  • .wav
  • +
  • .flac
  • +
  • .ogg
  • +
  • .m4a
  • +
  • .wma
  • +
  • .aac
  • +
  • .ac3
  • +
  • .alac
  • +
  • .aiff
  • +
  • .amr
  • +
  • .au
  • +
  • .mka
  • +
  • .mp2
  • +
  • .mpa
  • +
  • .opus
  • +
  • .ra
  • +
  • .tta
  • +
  • .wv
  • +
  • .weba
  • +
  • .webm
  • +
  • .webvtt
  • +
  • .srt +ETC.
  • +
+
+
+
+
+

audio: bool Default: True

+
+
+
Set to False if the clip doesn’t have any audio or if you do not wish to read the audio.
+
+
+

ffmpeg_options: dict | None Default: None

+
+
+
A dictionary of options to be passed to ffmpeg when generating the clip’s audio. If None, the default options will be used. If you want to pass options to the video part of the clip, you will have to use the vidiopy.VideoFileClip.set_make_frame method.
+
+
+
Attributes:
+
+
+

clip:

+
+
+
The Numpy array of the clip’s video frames.
+
+
+
+
+

Read docs for Clip() and VideoClip() for other, more generic, attributes.

+
+
+
Methods:
+
+
+

fl_frame_transform(self, func, *args, **kwargs) -> Self:

+
+
+
+

Applies a function to each frame of the video clip.

+

This method iterates over each frame in the video clip, applies a function to it, and replaces the original frame with the result.

+
+
Args:
+
func (callable): The function to apply to each frame. It should take an Image as its first argument, and return an Image.
+
*args: Additional positional arguments to pass to func.
+
**kwargs: Additional keyword arguments to pass to func.
+
Returns:
+
Self: Returns the instance of the class with updated frames.
+
Raises:
+
None
+
Example:
+
>>> video_clip = VideoClip()
+>>> def invert_colors(image):
+...     return ImageOps.invert(image)
+>>> video_clip.fl_frame_transform(invert_colors)
+
+
Note:
+
This method requires the start and end of the video clip to be set.
+
+
+
+
+

fl_clip_transform(self, func, *args, **kwargs) -> Self:

+
+
+
+

Applies a function to each frame of the video clip along with its timestamp.

+

This method iterates over each frame in the video clip, applies a function to it and its timestamp, and replaces the original frame with the result.

+
+
Args:
+
func (callable): The function to apply to each frame. It should take an Image and a float (representing the timestamp) as its first two arguments, and return an Image.
+
*args: Additional positional arguments to pass to func.
+
**kwargs: Additional keyword arguments to pass to func.
+
Returns:
+
Self: Returns the instance of the class with updated frames.
+
Raises:
+
None
+
Example:
+
>>> video_clip = VideoClip()
+>>> def add_timestamp(image, timestamp):
+...     draw = ImageDraw.Draw(image)
+...     draw.text((10, 10), str(timestamp), fill="white")
+...     return image
+>>> video_clip.fl_clip_transform(add_timestamp)
+
+
Note:
+
This method requires the fps of the video clip to be set.
+
+
+
+
+

make_frame_array(self, t: int | float) -> np.ndarray:

+
+
+
+

Generates a numpy array representation of a specific frame in the video clip.

+

This method calculates the index of the frame for a specific time, retrieves the frame from the video clip, and converts it to a numpy array.

+
+
Args:
+
t (int | float): The time of the frame to convert.
+
Returns:
+
np.ndarray: The numpy array representation of the frame.
+
Raises:
+
ValueError: If the duration of the video clip is not set.
+
Example:
+
>>> video_clip = VideoClip()
+>>> frame_array = video_clip.make_frame_array(10)
+
+
Note:
+
This method requires the duration of the video clip to be set.
+
+
+
+
+

make_frame_pil(self, t: int | float) -> Image.Image:

+
+
+
+

Generates a PIL Image representation of a specific frame in the video clip.

+

This method calculates the index of the frame for a specific time, retrieves the frame from the video clip, and returns it as a PIL Image.

+
+
Args:
+
t (int | float): The time of the frame to convert.
+
Returns:
+
Image.Image: The PIL Image representation of the frame.
+
Raises:
+
ValueError: If the duration of the video clip is not set.
+
Example:
+
>>> video_clip = VideoClip()
+>>> frame_image = video_clip.make_frame_pil(10)
+
+
Note:
+
This method requires the duration of the video clip to be set.
+
+
+
+
+

_import_video_clip(self, file_name: str, ffmpeg_options: dict | None = None) -> tuple:

+
+
+
+

Imports a video clip from a file using ffmpeg.

+

This method reads a video file using ffmpeg, converts each frame to a PIL Image, and returns a tuple of the images and the fps of the video.

+
+
Args:
+
file_name (str): The name of the video file to import.
+
ffmpeg_options (dict | None, optional): Additional options to pass to ffmpeg. Defaults to None.
+
Returns:
+
tuple: A tuple of the frames as PIL Images and the fps of the video.
+
Raises:
+
None
+
Example:
+
>>> video_clip = VideoClip()
+>>> frames, fps = video_clip._import_video_clip("video.mp4")
+
+
Note:
+
This method uses ffmpeg to read the video file. It is a private method and not intended for external use.
+
+
+
+
+
+
+
+ + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/reference_manual/reference_manual/index.html b/reference_manual/reference_manual/index.html new file mode 100644 index 0000000..a95abdd --- /dev/null +++ b/reference_manual/reference_manual/index.html @@ -0,0 +1,841 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Reference Manual - VidioPy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + + + + + + + + + + + + + + + + + +

Reference Manual

+ + + + + + + + + + + + + +

Comments

+ + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ +
+ + + + + + + + + + \ No newline at end of file diff --git a/search/search_index.json b/search/search_index.json new file mode 100644 index 0000000..86c2590 --- /dev/null +++ b/search/search_index.json @@ -0,0 +1 @@ +{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"VidioPy","text":"

VidioPy is a Python library for video processing. It provides a simple API for common video processing tasks such as reading and writing video files, transforming video clips, performing basic operations like cuts, concatenations, and title insertions. It also supports video compositing (a.k.a. non-linear editing), advanced video effects, and video processing.

"},{"location":"#overview","title":"Overview","text":"

1.Download & Install

1.Getting Started

1.Reference Manual

"},{"location":"#contributing","title":"Contributing","text":"

VidioPy is an open source project originally developed by Soham & released under the MIT license. It is currently maintained by Soham. The code is hosted on Github, where you can push improvements, report bugs and ask for help. We welcome all kinds of contributions, from code to documentation, to bug reports. Please read the contribution guidelines first.

"},{"location":"getting_started/basic_concepts/","title":"Basic Concepts","text":"

VidioPY primarily works with two types of clips: VideoClip and AudioClip. Both are derived from the Clip base class and can be manipulated in various ways such as cutting, slowing down, darkening, or mixing with other clips to create new ones. These clips can then be exported to various file formats such as MP4, GIF, MP3, etc.

flowchart TB\n    Clip[Clip]\n    Clip -->|inherits| VideoClip[VideoClip]\n    Clip -->|inherits| AudioClip[AudioClip]\n    VideoClip -->|inherits| ImageClip[ImageClip]\n    VideoClip -->|inherits| VideoFileClip[VideoFileClip]\n    VideoClip -->|inherits| ImageSequenceClip[ImageSequenceClip]\n    ImageClip -->|inherits| Data2ImageClip[Data2ImageClip]\n    Data2ImageClip -->|inherits| TextClip[TextClip]\n    Data2ImageClip -->|inherits| ColorClip[ColorClip]\n    AudioClip -->|inherits| SilenceClip[SilenceClip]\n    SilenceClip -->|inherits| AudioFileClip[AudioFileClip]\n    AudioClip -->|inherits| AudioArrayClip[AudioArrayClip]

VideoClip instances can be created from a video file, an image, a text, or a custom animation, and can include an audio track, which is an AudioClip.

VidioPY provides numerous effects that can be applied to a clip (e.g., clip.resize(width=\"360\"), clip.subclip(t1,t2), or clip.fx(vidiopy.brightness, 1.5)). Additionally, VidioPY implements several functions (like clip.fl_frame_transform, clip.fl_clip_transform, clip.fl_time_transform, clip.fx, etc.) that make it easy to create custom effects.

"},{"location":"getting_started/basic_concepts/#videoclip","title":"VideoClip","text":""},{"location":"getting_started/basic_concepts/#creating-a-videoclip","title":"Creating a VideoClip","text":"

A VideoClip can be created in several ways. The most common method is to load a video file using VideoFileClip:

from vidiopy import VideoFileClip\nclip = VideoFileClip(\"path/to/video.mp4\")\n

A VideoClip can also be created from an image:

from vidiopy import ImageClip\nclip = ImageClip(\"path/to/image.png\")\n
"},{"location":"getting_started/basic_concepts/#modifying-a-videoclip","title":"Modifying a VideoClip","text":"

A VideoClip has several attributes such as fps, duration, size, audio, start, end, etc. These can be modified using the corresponding set methods:

clip = clip.set_duration(10) # Not Allowed for the VideoClips only for the ImageClips\nclip = clip.set_fps(24) # Should be int or float\nclip = clip.set_start(5) # Useful for the Compositing & Concatenating Video Clip. More in the Mixing clips Section\nclip = clip.set_end(15) # Useful for the Compositing & Concatenating Video Clip. More in the Mixing clips Section\naudio = AudioClip(\"path/to/audio.mp3\")\nclip = clip.set_audio(audio) # Set the audio of the clip. useful for the ImageClips.\n...\n
"},{"location":"getting_started/basic_concepts/#applying-effects-to-a-videoclip","title":"Applying Effects to a VideoClip","text":"

Various effects can be applied to a VideoClip, including resize, crop, subclip, fx, etc.:

clip = resize(clip, width=360) # Resize the clip to the given width\nclip = crop(clip, x1=10, y1=10, x2=100, y2=100) # Crop the clip to the given dimensions\nclip = clip.subclip(t1=5, t2=10) # Cut the clip to the given duration\nclip = clip.fx(vidiopy.brightness, 1.5) # Apply the brightness effect to the clip\n...\n
"},{"location":"getting_started/basic_concepts/#exporting-a-videoclip","title":"Exporting a VideoClip","text":"

A VideoClip can be exported to a file using the write_videofile method:

clip.write_videofile(\"path/to/output/video.mp4\") # Write the clip to a file\nclip.write_videofile_subclip(\"path/to/output/video.mp4\", start_t=5, end_t=10) # Write the subclip to a file\nclip.write_image_sequence(nformat=\".png\", dir=\"images\") # Write the clip to a file as an image sequence\nclip.save_frame(\"path/to/output/frame.png\", t=5) # Save the frame of the clip to a file\n
"},{"location":"getting_started/basic_concepts/#imageclip","title":"ImageClip","text":""},{"location":"getting_started/basic_concepts/#creating-an-imageclip","title":"Creating an ImageClip","text":"

An ImageClip can be created by loading an image file using ImageClip or by creating a new image using Image.new or a numpy array:

from vidiopy import ImageClip\nfrom PIL import Image\nimport numpy as np\nclip = ImageClip(\"path/to/image.png\", fps=24, duration=10) # Create an image clip from a file\nclip2 = ImageClip(Image.new(\"RGB\", (720, 480), (0, 0, 0)), fps=24, duration=10) # Create an image clip from a PIL image\nclip3 = ImageClip(np.zeros((480, 720, 3), dtype=np.uint8), fps=24, duration=10) # Create an image clip from a numpy array\n
"},{"location":"getting_started/basic_concepts/#applying-effects-to-a-imageclip","title":"applying Effects to a ImageClip","text":"

All effects that can be applied to a VideoClip can also be applied to an ImageClip, but the process is a bit different. Some effects can be applied directly to the ImageClip, just as with a VideoClip, but others cannot be applied directly; for those, you first have to convert the ImageClip to a VideoClip and then apply the effect:

from vidiopy import ImageClip\nclip = ImageClip(\"path/to/image.png\", fps=24, duration=10) # Create an image clip from a file\nclip = clip.resize(width=360) # Resize the clip to the given width\nclip = clip.to_video_clip() # Convert the ImageClip to the VideoClip\nclip = clip.fx(accel_decel, 0.5) # Apply the accel_decel effect to the clip\n
"},{"location":"getting_started/basic_concepts/#audioclip","title":"AudioClip","text":""},{"location":"getting_started/basic_concepts/#creating-an-audioclip","title":"Creating an AudioClip","text":"

An AudioClip can be created by loading an audio file using AudioFileClip or SilenceClip:

from vidiopy import AudioFileClip\nclip = AudioFileClip(\"path/to/audio.mp3\") # Create an audio clip from a file Also accept video file it will extract the audio from the video file\nclip = SilenceClip(duration=10) # Create a silent audio clip\n
"},{"location":"getting_started/basic_concepts/#modifying-an-audioclip","title":"Modifying an AudioClip","text":"

An AudioClip has several attributes such as audio_data, fps, start, end, etc. These can be modified using the corresponding set methods:

clip.fps = 24 # Set the fps of the clip\nclip.start = 5 # Set the start time of the clip\nclip.end = 15 # Set the end time of the clip\nclip.audio_data = audio_data # Set the audio data of the clip\n...\n
"},{"location":"getting_started/basic_concepts/#applying-effects-to-an-audioclip","title":"Applying Effects to an AudioClip","text":"

Several effects can be applied to an AudioClip, such as cutting it with sub_clip or normalizing it with audio_normalize:

clip = clip.sub_clip(start=5, end=10) # Cut the clip to the given duration\nclip = audio_normalize(clip) # Apply the normalize effect to the clip\n...\n
"},{"location":"getting_started/basic_concepts/#exporting-an-audioclip","title":"Exporting an AudioClip","text":"

An AudioClip can be exported to a file using the write_audiofile method:

clip.write_audiofile(\"path/to/output/audio.mp3\") # Write the clip to a file\n
"},{"location":"getting_started/basic_concepts/#final-flowchart","title":"Final Flowchart","text":"
graph TD\n    Start((Start)) --> ChooseClipType[Choose Clip Type]\n    ChooseClipType --> |VideoClip| CreateVideoClip[Create VideoClip]\n    ChooseClipType --> |ImageClip| CreateImageClip[Create ImageClip]\n    ChooseClipType --> |AudioClip| CreateAudioClip[Create AudioClip]\n    CreateVideoClip --> |Load from file| LoadVideoFile[\"Load from file\"]\n    CreateVideoClip --> |Create from image| CreateVideoFromImage[\"Create from image\"]\n    LoadVideoFile --> ModifyVideoClip[Modify VideoClip]\n    CreateVideoFromImage --> ModifyVideoClip\n    ModifyVideoClip --> |Set duration| SetDuration[\"Set duration\"]\n    ModifyVideoClip --> |Set FPS| SetFPS[\"Set FPS\"]\n    ModifyVideoClip --> |Set start/end| SetStartEnd[\"Set start/end\"]\n    ModifyVideoClip --> |Set audio| SetAudio[\"Set audio\"]\n    ModifyVideoClip --> ApplyEffectsToVideo[Apply Effects to VideoClip]\n    ApplyEffectsToVideo --> |Resize| ResizeClip[\"Resize\"]\n    ApplyEffectsToVideo --> |Crop| CropClip[\"Crop\"]\n    ApplyEffectsToVideo --> |Subclip| Subclip[\"Subclip\"]\n    ApplyEffectsToVideo --> |Apply custom effects| CustomEffects[\"Apply custom effects\"]\n    ApplyEffectsToVideo --> ExportVideoClip[Export VideoClip]\n    ExportVideoClip --> |Write to video file| WriteVideoFile[\"Write to video file\"]\n    ExportVideoClip --> |Write subclip to video file| WriteSubclip[\"Write subclip to video file\"]\n    ExportVideoClip --> |Write as image sequence| WriteImageSequence[\"Write as image sequence\"]\n    ExportVideoClip --> |Save frame| SaveFrame[\"Save frame\"]\n    CreateImageClip --> |Load from file| LoadImageFile[\"Load from file\"]\n    CreateImageClip --> |Create from PIL image| CreateFromPIL[\"Create from PIL image\"]\n    CreateImageClip --> |Create from numpy array| CreateFromNumpy[\"Create from numpy array\"]\n    LoadImageFile --> ModifyImageClip[Modify ImageClip]\n    CreateFromPIL --> ModifyImageClip\n    CreateFromNumpy --> ModifyImageClip\n    ModifyImageClip --> |Resize| 
ResizeImageClip[\"Resize\"]\n    ModifyImageClip --> ConvertToVideo[Convert to VideoClip and apply effects]\n    ResizeImageClip --> ConvertToVideo\n    ConvertToVideo --> |Apply effects| ApplyEffectsToVideo\n    ApplyEffectsToVideo --> ExportImageClip[Export ImageClip]\n    ExportImageClip --> WriteVideoFile\n    ExportImageClip --> WriteSubclip\n    ExportImageClip --> WriteImageSequence\n    ExportImageClip --> SaveFrame\n    CreateAudioClip --> |Load from file| LoadAudioFile[\"Load from file\"]\n    CreateAudioClip --> |Create silent clip| CreateSilentClip[\"Create silent clip\"]\n    LoadAudioFile --> ModifyAudioClip[Modify AudioClip]\n    CreateSilentClip --> ModifyAudioClip\n    ModifyAudioClip --> |Set FPS| SetFPS_audio[\"Set FPS\"]\n    ModifyAudioClip --> |Set start/end| SetStartEnd_audio[\"Set start/end\"]\n    ModifyAudioClip --> |Set audio data| SetAudioData[\"Set audio data\"]\n    ModifyAudioClip --> ApplyEffectsToAudio[Apply Effects to AudioClip]\n    ApplyEffectsToAudio --> |Subclip| Subclip_audio[\"Subclip\"]\n    ApplyEffectsToAudio --> |Apply normalize effect| NormalizeEffect[\"Apply normalize effect\"]\n    ApplyEffectsToAudio --> ExportAudioClip[Export AudioClip]\n    ExportAudioClip --> WriteAudioFile[\"Write to audio file\"]\n    WriteVideoFile --> End((End))\n    WriteSubclip --> End\n    WriteImageSequence --> End\n    SaveFrame --> End\n    WriteAudioFile --> End\n\n    click Start \"#\"\n    click ChooseClipType \"#basic-concepts\"\n    click CreateVideoClip \"#creating-a-videoclip\"\n    click LoadVideoFile \"#creating-a-videoclip\"\n    click ModifyVideoClip \"#modifying-a-videoclip\"\n    click CreateVideoFromImage \"#creating-a-videoclip\"\n    click SetDuration \"#modifying-a-videoclip\"\n    click SetFPS \"#modifying-a-videoclip\"\n    click SetStartEnd \"#modifying-a-videoclip\"\n    click SetAudio \"#modifying-a-videoclip\"\n    click ApplyEffectsToVideo \"#applying-effects-to-a-videoclip\"\n    click ResizeClip 
\"#applying-effects-to-a-videoclip\"\n    click CropClip \"#applying-effects-to-a-videoclip\"\n    click Subclip \"#applying-effects-to-a-videoclip\"\n    click CustomEffects \"#applying-effects-to-a-videoclip\"\n    click ExportVideoClip \"#exporting-a-videoclip\"\n    click WriteVideoFile \"#exporting-a-videoclip\"\n    click WriteSubclip \"#exporting-a-videoclip\"\n    click WriteImageSequence \"#exporting-a-videoclip\"\n    click SaveFrame \"#exporting-a-videoclip\"\n    click CreateImageClip \"#creating-an-imageclip\"\n    click LoadImageFile \"#creating-an-imageclip\"\n    click ModifyImageClip \"#modifying-an-imageclip\"\n    click CreateFromPIL \"#creating-an-imageclip\"\n    click CreateFromNumpy \"#creating-an-imageclip\"\n    click ResizeImageClip \"#modifying-an-imageclip\"\n    click ConvertToVideo \"#modifying-an-imageclip\"\n    click ApplyEffectsToVideo \"#applying-effects-to-a-imageclip\"\n    click ExportImageClip \"#exporting-an-imageclip\"\n    click WriteVideoFile \"#exporting-an-imageclip\"\n    click WriteSubclip \"#exporting-an-imageclip\"\n    click WriteImageSequence \"#exporting-an-imageclip\"\n    click SaveFrame \"#exporting-an-imageclip\"\n    click CreateAudioClip \"#creating-an-audioclip\"\n    click LoadAudioFile \"#creating-an-audioclip\"\n    click ModifyAudioClip \"#modifying-an-audioclip\"\n    click CreateSilentClip \"#creating-an-audioclip\"\n    click SetFPS_audio \"#modifying-an-audioclip\"\n    click SetStartEnd_audio \"#modifying-an-audioclip\"\n    click SetAudioData \"#modifying-an-audioclip\"\n    click ApplyEffectsToAudio \"#applying-effects-to-an-audioclip\"\n    click Subclip_audio \"#applying-effects-to-an-audioclip\"\n    click NormalizeEffect \"#applying-effects-to-an-audioclip\"\n    click ExportAudioClip \"#exporting-an-audioclip\"\n    click WriteAudioFile \"#exporting-an-audioclip\"\n    click End \"#final-flowchart\"
"},{"location":"getting_started/download_install/","title":"Download and Installation","text":""},{"location":"getting_started/download_install/#installation","title":"Installation","text":""},{"location":"getting_started/download_install/#using-pip","title":"Using pip","text":"

If you're utilizing pip, installation is a breeze. Just execute the following command:

pip install vidiopy\n
Warning: Requires setuptools

In case setuptools isn't installed, rectify this by using:

pip install setuptools\n

"},{"location":"getting_started/download_install/#using-source","title":"Using Source","text":"
  1. Download the source code from the GitHub repository.
  2. Unzip the downloaded file into a designated folder.
  3. Run the following command in the terminal:
python setup.py install\n
"},{"location":"getting_started/download_install/#dependencies","title":"Dependencies","text":"

VidioPy relies on the following Python packages:

  • rich (1)

  • numpy (2)

  • ffmpegio (3)

  • pillow (4)

  1. rich is a Python library for rich text and beautiful formatting in the terminal. It is used for displaying progress bars and other rich text in the terminal.
  2. numpy is a Python library for numerical computing. It is used for handling arrays and matrices.
  3. ffmpegio is a Python library for reading and writing video files using ffmpeg. It is used for reading and writing video files.
  4. pillow is a Python library for image processing. It is used for reading, writing and modifying image files.

Pip will automatically install these dependencies for you during installation. If installing from source, manual installation of these dependencies is required.

VidioPy also depends on ffmpeg and ffprobe. It will attempt to download these binaries globally or place them in the vidiopy/binary directory if not found in the system's PATH or global variables. If the automatic download fails, you can manually download them from here and place them in the vidiopy/binary folder or set them in global variables.

For those who prefer more control over paths, you can specify the locations of ffmpeg and ffprobe using the vidiopy.set_path() function after importing vidiopy:

import vidiopy\nvidiopy.set_path(ffmpeg_path=\"path/to/ffmpeg\", ffprobe_path=\"path/to/ffprobe\")\n

Alternatively, you can pass the path of the folder containing ffmpeg and ffprobe:

import vidiopy\nvidiopy.set_path(ffmpeg_path=\"path/to/folder/containing/ffmpeg & ffprobe\")\n
"},{"location":"getting_started/mixing_clip/","title":"Mixing clips","text":"

Video composition, also known as non-linear editing, is the fact of playing several clips together in a new clip. This video is a good example of what compositing you can do with VidioPy:

"},{"location":"getting_started/mixing_clip/#compositing-concatenating-clips","title":"compositing / Concatenating clips","text":"

Two simple ways of putting clips together are to concatenate them (play them one after the other in a single long clip) or to composite them (play them side by side in a single larger clip).

"},{"location":"getting_started/mixing_clip/#concatenating-clips","title":"Concatenating clips","text":"

Concatenating means playing the clips one after the other in a single long clip. The function concatenate_videoclips takes a list of clips and returns a new clip that is the concatenation of all the clips in the list. Concatenation is done with the function concatenate_videoclips:

from vidiopy import VideoFileClip, ImageClip, concatenate_videoclips\nclip1 = VideoFileClip(\"video.mp4\").subclip(0,5)\nclip2 = ImageClip(\"image.jpg\").set_duration(5)\nf_clip = concatenate_videoclips([clip1,clip2], fps=24, over_scale=True)\nf_clip.write_videofile(\"output.mp4\")\n

The f_clip is a clip that plays the clips 1, and 2 one after the other. Note that the clips do not need to be the same size. If they aren't they will all appear centered in a clip large enough to contain the biggest of them, with optionally a color of your choosing to fill the borders. You have many other options there (see the doc of the function).

"},{"location":"getting_started/mixing_clip/#compositing-clip","title":"Compositing Clip","text":"

Compositing is done with the function composite_videoclips:

video = CompositeVideoClip([clip1,clip2,clip3])\n

Now video plays clip1, and clip2 on top of clip1, and clip3 on top of clip1, and clip2. For instance, if clip2 and clip3 have the same size as clip1, then only clip3, which is on top, will be visible in the video\u2026 unless clip3 and clip2 have masks which hide parts of them. Note that by default the composition has the size of the largest clip or first if bg_clip=True.

"},{"location":"getting_started/mixing_clip/#starting-and-stopping-times","title":"Starting and stopping times","text":"

In a CompositionClip, all the clips start to play at a time that is specified by the clip.start attribute. You can set this starting time as follows:

clip1 = clip1.with_start(5) # start after 5 seconds So for instance your composition will look like

video = CompositeVideoClip([clip1, # starts at t=0\n                            clip2.with_start(5), # start at t=5s\n                            clip3.with_start(9)]) # start at t=9s\n

In the example above, maybe clip2 will start before clip1 is over.

"},{"location":"getting_started/mixing_clip/#positioning-clips","title":"Positioning clips","text":"

If clip2 and clip3 are smaller than clip1, you can decide where they will appear in the composition by setting their position. Here we indicate the coordinates of the top-left pixel of the clips:

video = CompositeVideoClip([clip1,\n                           clip2.with_position((45,150)),\n                           clip3.with_position((90,100))])\n

There are many ways to specify the position:

clip2.with_position((45,150)) # x=45, y=150 , in pixels\n\nclip2.with_position(\"center\") # automatically centered\n\n# clip2 is horizontally centered, and at the top of the picture\nclip2.with_position((\"center\",\"top\"))\n\n# clip2 is vertically centered, at the left of the picture\nclip2.with_position((\"left\",\"center\"))\n\n# clip2 is at 40% of the width, 70% of the height of the screen:\nclip2.with_position((0.4,0.7), relative=True)\n\n# clip2's position is horizontally centered, and moving down!\nclip2.with_position(lambda t: ('center', 50+t) )\n

When indicating the position keep in mind that the y coordinate has its zero at the top of the picture:

"},{"location":"getting_started/mixing_clip/#compositing-audio-clips","title":"Compositing audio clips","text":"

When you mix video clips together, VidioPy will automatically compose their respective audio tracks to form the audio track of the final clip, so you don\u2019t need to worry about compositing these tracks yourself.

If you want to make a custom audio track from several audio sources: audio clips can be mixed together with CompositeAudioClip and concatenate_audioclips:

from moviepy import *\n# ... make some audio clips aclip1, aclip2, aclip3\nconcat = concatenate_audioclips([aclip1, aclip2, aclip3])\ncompo = CompositeAudioClip([aclip1.multiply_volume(1.2),\n                            aclip2.with_start(5), # start at t=5s\n                            aclip3.with_start(9)])\n
"},{"location":"getting_started/quick_presentation/","title":"Getting started to use VidioPy","text":""},{"location":"getting_started/quick_presentation/#advantages-and-limitations","title":"Advantages and limitations","text":"

VidioPy has been developed with the following goals in mind:

Advantages:

  • Simple syntax for cutting, concatenations, title insertions, video compositing, video processing, and creation of custom effects.
  • Same syntax for all operating systems (Linux, macOS, Windows).
  • Flexible : You have total control over the frames of the video and audio, and creating your own effects is easy as Py.
  • Fast : you can batch operations as much as you want, backend in ffmpeg, pillow, numpy, etc. for speed.
  • Supports most video formats and codecs. & Question Support.

Limitations:

  • still in development.
  • less documentation & Features.
"},{"location":"getting_started/quick_presentation/#how-vidiopy-works","title":"How Vidiopy works","text":"

VidioPy uses the ffmpeg (1) library to read and write video files. The processing of the different media is done using modules like NumPy, OpenCV, Pillow, etc.

  1. ffmpeg is a tool for handling multimedia files. It is used for reading and writing video files, and for converting between different video and audio formats.
flowchart LR\n    subgraph clips\n        video(film reel)\n        audio(sound wave)\n        pictures(image)\n    end\n    Processing[numpy, opencv, pillow, etc]\n    subgraph processing\n        Processing\n    end\n    subgraph output\n        Output_Image(Image Sequence, Image File)\n        Output_Video(Video File)\n        Output_Audio(Audio File)\n    end\n    video -->|ffmpeg| processing\n    audio -->|ffmpeg| processing\n    pictures -->|ffmpeg or pillow| processing\n    processing -->|ffmpeg| Output_Video\n    processing -->|ffmpeg| Output_Audio\n    processing -->|ffmpeg or pillow| Output_Image
"},{"location":"getting_started/quick_presentation/#example-code","title":"Example code","text":"
from vidiopy import VideoFileClip, TextClip\n\n# Load myHolidays.mp4 and trimming it to 10 seconds. 50s to 60s.\nclip = VideoFileClip(\"myHolidays.mp4\").subclip(50,60)\n\n# Generate a text clip. You can customize the font, color, etc.\ntxt_clip = TextClip(\"My Holidays 2013\", font_size=70, txt_color='white', bg_color='gray', font=r'path/to/font.ttf')\ntxt_clip = txt_clip.set_pos('center', 'right').set_duration(10)\n\n# Overlay the text clip on the first video clip\nvideo = CompositeVideoClip([clip, txt_clip])\n\n# Write the result to a video file in any format\nvideo.write_videofile(\"myHolidays_edited.webm\")\nvideo.write_videofile(\"myHolidays_edited.mp4\")\nvideo.write_videofile(\"myHolidays_edited.avi\")\nvideo.write_videofile(\"myHolidays_edited.mkv\")\n\n# Writing single frame\nvideo.save_frame(\"frame.png\", t=0.5) # t= time in seconds\n\n# Writing Image Sequence\nvideo.write_image_sequence(\"image%03d.png\", fps=24) # %03d are placeholders for the numbers 001, 002, 003, etc. fps = frames per second\nvideo.write_image_sequence(\"image%03d.jpg\", fps=24) # %03d are placeholders for the numbers 001, 002, 003, etc. fps = frames per second\nvideo.write_image_sequence(\"image%03d.bmp\", fps=24) # %03d are placeholders for the numbers 001, 002, 003, etc. fps = frames per second\n
"},{"location":"getting_started/read%26write/","title":"Reading/Writing Video & Audio","text":""},{"location":"getting_started/read%26write/#reading-video-from-file","title":"Reading Video from file","text":"

The first step of video editing is reading videos from file. This can be done using the vidiopy.VideoFileClip class. This class takes the path of the video file as input and returns a video object which inherits from the VideoClip class.

import vidiopy\nvideo = vidiopy.VideoFileClip(\"path/to/video.extension\") # you can perform the operations on the video object\nvideo_without_audio = vidiopy.VideoFileClip(\"path/to/video.extension\", audio=False) # defaults to `audio=True`\n

If the video does not have audio, a silent audio clip will be created.

"},{"location":"getting_started/read%26write/#writing-video-to-file","title":"Writing Video to file","text":"

To write the video we can use the write_videofile function inside the VideoClip. Other clip types inherit it from the VideoClip.

import vidiopy\nvideo = vidiopy.VideoFileClip(\"path/to/video.extension\")\nvideo.write_videofile(\"path/to/output/video.extension\", fps=30) # fps is optional it will default use the fps of the video if it is set\n
"},{"location":"getting_started/read%26write/#reading-audio-from-file","title":"Reading Audio from file","text":"

To read the audio from the file we can use the AudioFileClip class. This class takes the path of the audio file as input and returns an audio clip which inherits from the AudioClip class.

import vidiopy\naudio = vidiopy.AudioFileClip(\"path/to/audio.extension\")\n
"},{"location":"getting_started/read%26write/#writing-audio-to-file","title":"Writing Audio to file","text":"

To write the audio we can use the write_audiofile function inside the AudioClip. Other clip types inherit it from the AudioClip.

import vidiopy\naudio = vidiopy.AudioFileClip(\"path/to/audio.extension\")\naudio.write_audiofile(\"path/to/output/audio.extension\")\n
"},{"location":"more/CONTRIBUTING/","title":"VidioPy's Contribution Guidelines","text":""},{"location":"more/CONTRIBUTING/#communication-on-github","title":"Communication on GitHub","text":"
  • Keep discussions on GitHub issues and pull requests focused and concise. Remember that each comment triggers a notification for multiple people.
  • Before making significant changes to the core codebase, discuss them with the team.
"},{"location":"more/CONTRIBUTING/#setting-up-your-development-environment","title":"Setting Up Your Development Environment","text":"
  • Fork the official VidioPy repository to your own GitHub account.
  • Clone the forked repository to your local machine.
  • Create and activate a Python virtual environment to isolate the project dependencies.
  • Navigate to the cloned directory and run pip install -e . to install the project dependencies.
  • Regularly sync your local repository with the main repository to stay up-to-date with the latest changes.
"},{"location":"more/CONTRIBUTING/#coding-standards-and-code-quality","title":"Coding Standards and Code Quality","text":"
  • Adhere to the PEP8 coding conventions for Python.
  • Use comments judiciously and only when necessary. Aim to write self-explanatory code.
  • Choose clear and descriptive names for variables, functions, and classes.
  • Document new features or bug fixes with docstring. Update the documentation in the docs/markdown/ directory as needed.
  • Use Prettier to maintain consistent code formatting.
  • Review your code in PyCharm or VSCode to catch potential edge cases.
  • When adding new functions or features, update the corresponding unit tests or mention the need for new tests in your pull request.
  • read the Code Style Guide
"},{"location":"more/CONTRIBUTING/#submitting-pull-requests","title":"Submitting Pull Requests","text":"
  • You can submit a pull request (PR) even if your work is still in progress; it doesn't have to be fully finished.
  • Before submitting your PR, run the test suite using pytest to ensure your changes haven't broken anything.
  • Provide a clear and detailed description of your changes when submitting your PR. This will help the reviewers understand your work and expedite the review process.
"},{"location":"more/code%20style%20guide/","title":"code style guide","text":""},{"location":"more/code%20style%20guide/#functions-and-methods-docstrings","title":"functions and methods docstrings","text":""},{"location":"more/code%20style%20guide/#function-and-method-docstring-template","title":"function and method docstring template","text":"
\"\"\"\\\nA Brief Description of the Function or Method\n\n#### Parameters:\n    - `param1` `type`: -\n        The first Doc.\n    - `param2` `type[type, type]`: -\n        The second Doc.\n    - `param3` `(type, optional, ...)`: -\n        The third Doc.\n    - `param4` `(type, optional, default=None)`: -\n        The fourth Doc.\n    - `*param5` `(type, optional, ...)`: -\n        The fifth Doc.\n    - `**param6` `(type, optional, ...)`: -\n        The sixth Doc.\n\n#### returns: # if return Multiple things\n    - `int`: - an a xyz.\n    - `float`: - an a abc\n\n#### return: `int` # if return Single thing\n    Doc goes here.\n\n#### return: `None` # if do not return anything\n\n#### raises: # add if needed\n    - `Error`: - if xyz.\n    - `Exception`: - if abc.\n\n#### Note: # add if needed\n    - xyz\n    - More notes.\n\n#### Warning: # add if needed\n    - xyz\n    - More warnings.\n\n#### examples:\n    example 1 :\n\n    \\`\\`\\`python\n    >>> code\n    output\n    \\`\\`\\`\n    example 2 :\n\n    \\`\\`\\`python\n    code # explain\n    \\`\\`\\`\n    - More examples.\n\n#### TODO: # add if needed\n    - xyz\n    - More TODOs.\n\n#### [function reference manual](https://github.com/SohamTilekar/vidiopy/blob/master/docs/...)\n\n\"\"\"\n
"},{"location":"more/code%20style%20guide/#function-and-method-docstring-conventions","title":"function and method docstring conventions","text":"
  • Docstrings are always triple quoted strings use \"\"\" not '''.
  • add a blank line after the docstring.
  • use the #### for the sections.
  • add as much detail as possible.
  • add the link to the Function or Method Reference manual.
"},{"location":"more/code%20style%20guide/#class-docstrings","title":"class docstrings","text":""},{"location":"more/code%20style%20guide/#class-docstring-template","title":"class docstring template","text":"
\"\"\"\\\nA Brief Description of the Class\n\nproperties:\n    - `property1`: - a short 1 line description of the property.\n    - `property2`: - a short 1 line description of the property.\n\nmethods:\n    - `method1`: - a short 1 line description of the method.\n    - `method2`: - a short 1 line description of the method.\n\nabstract methods:\n    - `method1`: - a short 1 line description of the method.\n    - `method2`: - a short 1 line description of the method.\n\n#### Note: # add if needed\n    - xyz\n    - More notes.\n\n#### Warning: # add if needed\n    - xyz\n    - More warnings.\n\n#### examples:\n    example 1 :\n    \\`\\`\\`python\n    >>> code\n    output\n    \\`\\`\\`\n\n    example 2 :\n    \\`\\`\\`python\n    code # explain\n    \\`\\`\\`\n    - More examples.\n\"\"\"\n
"},{"location":"more/code%20style%20guide/#class-docstring-conventions","title":"class docstring conventions","text":"
  • Docstrings are always triple quoted strings use \"\"\" not '''.
  • add a blank line after the docstring.
  • use the #### for the sections.
  • add as much detail as possible.
"},{"location":"more/code%20style%20guide/#comments","title":"Comments","text":"
  • Use as few comments as possible.
  • Use comments where code is not self explanatory or weird.
"},{"location":"reference_manual/reference_manual/","title":"Reference Manual","text":"
  • Clip

  • VideoClips

    • VideoClip
    • VideoFileClip
    • ImageClips
    • ImageSequenceClip
    • Mixing Clip
  • Audioclips

    • AudioClip
    • AudioFileClip
    • AudioArrayClip
    • SilenceClip
    • Mixing Audio
"},{"location":"reference_manual/clips/clip/","title":"Clip","text":"

class vidiopy.Clip.Clip

Bases: object

A Clip is the base class for all the clips (VideoClip and AudioClip).

fx(func, *args, **kwargs)

Apply a function to the current instance and return the result.

This method allows for the application of any callable to the current instance of the class. The callable should take the instance as its first argument, followed by any number of positional and keyword arguments.

Parameters:
  • func: (Callable[..., Self]): The function to apply. This should take the instance as its first argument.
  • *args: Variable length argument list for the function.
  • **kwargs: Arbitrary keyword arguments for the function.
Returns:
  • Self: The result of applying the function to the instance.
Example:
>>> clip = Clip()\n>>> def do(instance):\n...     # Do something with instance.\n...     return instance.\n...\n>>> new_clip = clip.fx(do)\n

copy()

Creates a deep copy of the current Clip object.

This method creates a new instance of the Clip object, copying all the attributes of the current object into the new one. If the current object has an 'audio' attribute, it also creates a deep copy of this 'audio' object and assigns it to the 'audio' attribute of the new Clip object.

Returns: Clip: A new Clip object that is a deep copy of the current object.

close()

Release any resources that are in use.

__enter__()

Enter the context manager.

__exit__()

Exit the context manager."},{"location":"reference_manual/clips/audio_clips/audioarrayclip/","title":"AudioArrayClip","text":"

class vidiopy.AudioArrayClip

Bases: vidiopy.AudioClip

AudioArrayClip is a class that represents an audio clip from an array. It extends the AudioClip class.

Parameters:
  • audio_data: np.ndarray: The audio data.
  • fps: int: The sample rate of the audio clip.
  • duration: int | float: The duration of the audio clip.
Example:

```python import numpy as np import vidiopy

audio_data = np.random.uniform(-1, 1, 44100 * 3) # 3 seconds of random audio audio_clip = vidiopy.AudioArrayClip(audio_data, fps=44100) ```

"},{"location":"reference_manual/clips/audio_clips/audioclip/","title":"AudioClip","text":"

class vidiopy.AudioClip

Bases: vidiopy.Clip

The AudioClip class represents an audio clip. It is a subclass of the Clip class.

Parameters:
  • duration (int or float, optional): The duration of the audio clip. Defaults to None.
  • fps (int, optional): Frames per second of the audio clip. Defaults to None.
Attributes:
  • fps: int | None: The frames per second of the audio clip. Defaults to fps Parameter.
  • _original_dur: int | float | None: The original duration of the audio clip. Defaults to duration Parameter.
  • _audio_data: np.ndarray | None: The audio data of the clip. Defaults to None.
  • channels: int | None: The number of audio channels. Defaults to None.
  • _st: int | float: The start time of the audio clip. Defaults to 0.0.
  • _ed: int | float | None: The end time of the audio clip. Defaults to duration Parameter.
Properties:

audio_data: np.ndarray

This property gets the audio data. If the audio data is not set, it raises a ValueError.

Returns: np.ndarray: The audio data. Raises: ValueError: If the audio data is not set.

Example: :

>>> clip = AudioClip()\n>>> clip.audio_data = np.array([1, 2, 3])\n>>> print(clip.audio_data)\narray([1, 2, 3])\n

duration: int | float

This property gets the duration of the audio clip. The duration is represented in seconds and can be an integer, a float, or None if the duration is not set.

Note: You Can't Set the duration of the audio clip it is not allowed to change directly.

Raises: AttributeError: Always raises an AttributeError if you try to set duration.

Returns: int | float: The duration of the audio clip.

Example: :

>>> clip = AudioClip(duration=10)\n>>> print(clip.duration)\n10\n

start: int | float

This property gets the start time of the audio clip. The start time is represented in seconds and can be an integer or a float.

Returns: int | float: The start time of the audio clip.

Example: :

>>> clip = AudioClip()\n>>> print(clip.start)\n0.0\n>>> clip.start = 5\n>>> print(clip.start)\n5\n

end: int | float | None

This property gets the end time of the audio clip. The end time is represented in seconds and can be an integer, a float, or None if the end time is not set.

Returns: int | float | None: The end time of the audio clip.

Example: :

>>> clip = AudioClip(duration=10)\n>>> print(clip.end)\n10\n>>> clip.end = 5\n>>> print(clip.end)\n5\n

Methods:

def set_data(self, audio_data: np.ndarray) -> Self:

This method sets the audio data and returns the instance of the class.

Args: audio_data (np.ndarray): The audio data to set. Returns: AudioClip: The instance of the class.

Example: :

    >>> clip = AudioClip()\n    >>> clip.set_data(np.array([1, 2, 3]))\n    >>> print(clip.audio_data)\n    array([1, 2, 3])\n

def set_fps(self, fps: int | None) -> Self:

This method sets the frames per second (fps) for the audio clip and returns the instance of the class.

Args: fps: int | None: The frames per second to set. If None, the fps will be unset. Returns: AudioClip: Self The Instance of the class.

Example: :

>>> clip = AudioClip()\n>>> clip.set_fps(30)\n>>> print(clip.fps)\n30\n

def set_start(self, start: int | float) -> Self:

This method sets the start time of the audio clip and returns the instance of the class. The start time is represented in seconds and can be an integer or a float.

Args: start: int | float: The start time to set in seconds. Returns: AudioClip: The instance of the class with the updated start time.

Example: :

>>> clip = AudioClip()\n>>> clip.set_start(5)\n>>> print(clip.start)\n5\n

def set_end(self, end: int | float | None) -> Self:

This method sets the end time of the audio clip and returns the instance of the class. The end time is represented in seconds and can be an integer, a float, or None if the end time is not to be set.

Args: end: int | float | None: The end time to set in seconds. Returns: AudioClip: The instance of the class with the updated end time.

Example: :

>>> clip = AudioClip()\n>>> clip.set_end(10)\n>>> print(clip.end)\n10\n

def get_frame_at_t(self, t: int | float) -> np.ndarray:

This method gets the audio frame at a specific time t. The time t is represented in seconds and can be an integer or a float. It calculates the frame index using the duration, total frames, and time t, and returns the audio data at that frame index.

Args: t: int | float: The time in seconds at which to get the audio frame. Returns: np.ndarray: The audio data at the specified time. Raises: ValueError: If frames per second (fps) is not set, audio data is not set, or original duration is not set.

def iterate_frames_at_fps(self, fps: int | float | None = None) -> Generator[np.ndarray, None, None]:

This method generates audio frames at a specific frames per second (fps) rate. If no fps is provided, it uses the fps set in the AudioClip instance. It calculates the original fps using the duration and total frames, then generates frames at the specified fps rate.

Args: fps (int | float | None, optional): The frames per second rate at which to generate frames. If not provided, the fps set in the AudioClip instance is used. Yields: np.ndarray: The audio data at each frame. Raises: ValueError: If frames per second (fps) is not set, audio data is not set, or original duration is not set.

def iterate_all_frames(self) -> Generator[np.ndarray, None, None]:

This method generates all audio frames in the AudioClip instance. It iterates over each frame in the audio data and yields it.

Yields: np.ndarray: The audio data at each frame. Raises: ValueError: If audio data is not set.

def fl_frame_transform(self, func, *args, **kwargs) -> Self:

This method applies a function to each frame of the audio data. The function should take a frame (an ndarray of channel data) as its first argument, followed by any number of additional positional and keyword arguments.

Args:
  • func (Callable): The function to apply to each frame. It should take a frame (an ndarray of channel data) as its first argument.
  • *args: Additional positional arguments to pass to the function.
  • **kwargs: Additional keyword arguments to pass to the function.
Returns: AudioClip: The instance of the class with the transformed audio data. Raises: ValueError: If audio data is not set.

def fl_clip_transform(self, func, *args, **kwargs) -> Self:

This method applies a function to the entire audio data. The function should take the AudioClip instance as its first argument, followed by any number of additional positional and keyword arguments.

Args:
  • func (Callable): The function to apply to the audio data. It should take the AudioClip instance as its first argument.
  • *args: Additional positional arguments to pass to the function.
  • **kwargs: Additional keyword arguments to pass to the function.
Returns: AudioClip: The instance of the class with the transformed audio data. Raises: ValueError: If audio data is not set.

def fl_time_transform(self, func: Callable[[int | float], int | float]) -> Self:

This method applies a time transformation function to the get_frame_at_t method of the AudioClip instance. The transformation function should take a time (an integer or a float) as its argument and return a transformed time.

The get_frame_at_t method is replaced with a new method that applies the transformation function to its argument before calling the original method.

Args: func (Callable[[int | float], int | float]): The time transformation function to apply. It should take a time (an integer or a float) as its argument and return a transformed time. Returns: AudioClip: The instance of the class with the transformed get_frame_at_t method. Raises: ValueError: If the get_frame_at_t method is not set.

def sub_clip_copy(self, start: float | int | None = None, end: float | int | None = None) -> Self

This method creates a copy of the AudioClip instance and then creates a subclip from the audio clip starting from start to end in the copied instance. If start or end is not provided, it uses the start or end time set in the AudioClip instance. If neither is set, it uses 0 for start and the duration for end.

It calculates the original frames per second (fps) using the duration and total frames, then calculates the start and end frame indices using the original fps. It then updates the audio data, original duration, end time, and start time of the copied AudioClip instance.

Args:
  • start (float | int | None, optional): The start time of the subclip in seconds. If not provided, the start time set in the AudioClip instance is used. Defaults to None.
  • end (float | int | None, optional): The end time of the subclip in seconds. If not provided, the end time set in the AudioClip instance is used. Defaults to None.
Returns: AudioClip: A copy of the instance of the class with the updated audio data, original duration, end time, and start time. Raises: ValueError: If audio data is not set, original duration is not set, or end time is greater than the original duration.

def copy(self) -> Self:

This method creates a deep copy of the AudioClip instance and returns it. It uses the copy_ function, which should be a deep copy function like copy.deepcopy in Python's standard library. Returns: AudioClip: A deep copy of the instance of the class. Raises: ValueError: If the copy_ function is not set or does not correctly create a deep copy. > def write_audiofile(self, path: str, fps: int | None = None, overwrite=True, show_log=False, **kwargs) -> None:

This method writes the audio data to an audio file at the specified path. It uses the frames per second (fps) if provided, otherwise it uses the fps set in the AudioClip instance. It raises a ValueError if fps is not set in either way. It also raises a ValueError if audio data, original duration, or channels are not set.

It creates a temporary audio data array by getting the frame at each time step from 0 to the end or duration with a step of 1/fps. It then writes the temporary audio data to the audio file using the ffmpegio.audio.write function.

Args:
  • path (str): The path to write the audio file to.
  • fps (int | None, optional): The frames per second to use. If not provided, the fps set in the AudioClip instance is used. Defaults to None.
  • overwrite (bool, optional): Whether to overwrite the audio file if it already exists. Defaults to True.
  • show_log (bool, optional): Whether to show the log of the ffmpegio.audio.write function. Defaults to False.
  • **kwargs: Additional keyword arguments to pass to the ffmpegio.audio.write function.
Raises: ValueError: If fps is not set, audio data is not set, original duration is not set, or channels are not set."},{"location":"reference_manual/clips/audio_clips/audiofileclip/","title":"AudioFileClip","text":"

class vidiopy.AudioFileClip

Bases: vidiopy.SilenceClip

AudioFileClip is a class that represents an audio file. It extends the SilenceClip class.

Parameters:
  • path: str | pathlib.Path: The path to the audio file.
  • duration (int | float | None, optional): The duration of the audio file. If not provided, it will be calculated from the audio file.
Raises:
  • ValueError: If the audio file is empty and duration is not provided.
"},{"location":"reference_manual/clips/audio_clips/mixingaudio/","title":"Concatenating Audio Clips","text":"

def concatenate_audioclips(clips: list[AudioClip], fps: int | None = 44100) -> AudioClip | AudioArrayClip:

Concatenates multiple audio clips into a single audio clip.

Parameters:
  • clips: list[AudioClip]: A list of AudioClip objects to be concatenated.
  • fps (int, optional): The frames per second (fps) for the output AudioClip. If not provided, it defaults to 44100, or the maximum fps value found in the input clips.
Returns:

AudioClip | AudioArrayClip: The concatenated AudioClip. If the input clips have different channels, the output AudioClip will have the maximum number of channels found in the input clips, and the missing channels in the other clips will be filled with the mean value of their existing channels.

Raises:

ValueError: If no clips are provided, or if no fps value is found or set, or if a clip's channels are not set.

Note:
  • The duration of the output AudioClip is the sum of the durations of the input clips.
  • If a clip's end time is set, it is used to calculate its duration; otherwise, its duration attribute is used.
  • If neither is set, a ValueError is raised.
"},{"location":"reference_manual/clips/audio_clips/mixingaudio/#compositing-audio-clips","title":"Compositing Audio Clips","text":"

def composite_audioclips(clips: list[AudioClip], fps: int | None = 44100, use_bg_audio: bool = False) -> AudioArrayClip:

Composites multiple audio clips into a single audio clip.

Parameters:
  • clips: list[AudioClip]: A list of AudioClip objects to be composited.
  • fps (int, optional): The frames per second (fps) for the output AudioClip. If not provided, it defaults to the maximum fps value found in the input clips.
  • use_bg_audio (bool, optional): If True, the first clip in the list is used as the background audio. The remaining clips are overlaid on top of this background audio. If False, a SilenceClip of the maximum duration found in the clips is used as the background audio.
Returns:

AudioArrayClip: The composited AudioClip. The output AudioClip will have the maximum number of channels found in the input clips, and the missing channels in the other clips will be filled with the mean value of their existing channels.

Raises:

ValueError: If no clips are provided, or if no fps value is found or set, or if a clip's channels are not set, or if no duration is found or set in the clips when use_bg_audio is False.

Note:
  • The duration of the output AudioClip is the duration of the background audio.
  • If a clip's end time is set, it is used to calculate its duration; otherwise, its duration attribute is used.
  • If neither is set, a ValueError is raised.
"},{"location":"reference_manual/clips/audio_clips/silenceclip/","title":"Silence Clip","text":"

class vidiopy.SilenceClip

Bases: vidiopy.AudioClip

SilenceClip is a subclass of AudioClip that represents a silent audio clip.

It inherits from AudioClip therefore it has all the methods and attributes of AudioClip.

Parameters:
  • duration: int | float: The duration of the audio clip.
  • fps (int, optional): The frames per second of the audio clip. Default is 44100.
  • channels (int, optional): The number of audio channels. Default is 1.
"},{"location":"reference_manual/clips/video_clips/imageclips/","title":"ImageClip","text":"

class vidiopy.ImageClip(image: str | Path | Image.Image | np.ndarray | None = None, fps: int | float | None = None, duration: int | float | None = None)

Bases: vidiopy.VideoClip

All Methods and properties of the VideoClip class are available.

A class representing a video clip generated from a single image.

Parameters:
  • image: str | Path | Image.Image | np.ndarray | None: The image to use for the video clip. If None, an empty video clip is created.
  • fps: int | float | None: The frames per second of the video clip. If None, the fps is set to 30.
  • duration: int | float | None: The duration of the video clip in seconds. If None, the duration is set to 1.
Attributes:
  • image: Image.Image: The image used for the video clip.
  • Other attributes are inherited from the VideoClip class.
Methods:

_import_image(self, image) -> Image.Image:

Import the image from various sources.

Not intended for external use.

Parameters: image (str | Path | Image.Image | np.ndarray): Input image data. Returns: Image.Image: The imported image data.

This is a private method and not intended for external use.

You Can Use set_duration() & duration property to change _dur.

fl_frame_transform(self, func, *args, **kwargs) -> Self:

Apply a frame transformation function to the image.

Parameters: func (Callable): The frame transformation function. *args: Additional positional arguments for the function. **kwargs: Additional keyword arguments for the function. Returns: ImageClip: A new ImageClip instance with the transformed image. Note: This method modifies the current ImageClip instance in-place. Example Usage:
image_clip = ImageClip(image_path, fps=30, duration=5.0)\ntransformed_clip = image_clip.fl_frame_transform(resize, width=640, height=480)\n

fl_frame_transform(self, func, *args, **kwargs) -> Self:

Apply a frame transformation function to the image.

Parameters: func (Callable): The frame transformation function. *args: Additional positional arguments for the function. **kwargs: Additional keyword arguments for the function. Returns: ImageClip: A new ImageClip instance with the transformed image. Note: This method modifies the current ImageClip instance in-place. Example Usage:
image_clip = ImageClip(image_path, fps=30, duration=5.0)\ntransformed_clip = image_clip.fl_frame_transform(resize, width=640, height=480)\n

fl_clip_transform(self, func, *args, **kwargs) -> Self:

Raise a ValueError indicating that fl_clip is not applicable for ImageClip.

The Clip should be converted to VideoClip using to_video_clip method first.

Parameters: func: Unused. *args: Unused. **kwargs: Unused. Returns: ImageClip: The current ImageClip instance. Raises: ValueError: This method is not applicable for ImageClip. Example Usage:
image_clip = ImageClip(image_path, fps=30, duration=5.0)\nimage_clip.fl_clip(some_function)  # Raises ValueError\n

fx(self, func: Callable, *args, **kwargs):

Apply a generic function to the ImageClip.

Parameters: func (Callable): The function to apply. *args: Additional positional arguments for the function. **kwargs: Additional keyword arguments for the function. Returns: ImageClip: The current ImageClip instance. Note: This method modifies the current ImageClip instance in-place. Example Usage:
def custom_function(image):\n    # Some custom processing on the image\n    return modified_image\n\nimage_clip = ImageClip(image_path, fps=30, duration=5.0)\nimage_clip.fx(custom_function, some_arg=42)\n

sub_fx(self, func, *args, start_t: int | float | None = None, end_t: int | float | None = None, **kwargs) -> Self:

Apply a custom function to the Image Clip.

Note: Before using the sub_fx method, you need to convert the image clip to a video clip using to_video_clip() function. Args: func: The custom function to apply to the Image Clip. *args: Additional positional arguments to pass to the custom function. start_t (int | float | None): The start time of the subclip in seconds. If None, the subclip starts from the beginning. end_t (int | float | None): The end time of the subclip in seconds. If None, the subclip ends at the last frame. **kwargs: Additional keyword arguments to pass to the custom function. Returns: Self: The modified ImageClips instance. Example:
# Convert the image clip to a video clip\nvideo_clip = image_clip.to_video_clip()\n\n# Apply a custom function to the video clip\nmodified_clip = video_clip.sub_fx(custom_function, start_t=2, end_t=5)\n
Raises: ValueError: If the method is called on an Image Clip instead of a Video Clip.

sub_clip_copy(self, start: int | float | None = None, end: int | float | None = None) -> Self:

Create a copy of the current clip and apply sub-clip operation. Read more about sub-clip operation in the sub_clip method.

Args: start (int | float | None): Start time of the sub-clip in seconds. If None, the sub-clip starts from the beginning of the original clip. end (int | float | None): End time of the sub-clip in seconds. If None, the sub-clip ends at the end of the original clip. Returns: Self: A new instance of the clip with the sub-clip applied. Example:
image_clip = ImageClip(image_path, fps=30, duration=5.0)\nsub_clip = image_clip.sub_clip_copy(start=2, end=5)\n

sub_clip(self, start: int | float | None = None, end: int | float | None = None) -> Self:

Returns a sub-clip of the current clip.

Args: start (int | float | None, optional): The start time of the sub-clip in seconds. Defaults to None. end (int | float | None, optional): The end time of the sub-clip in seconds. Defaults to None. Returns: Self: The sub-clip. Note: It modifies the current clip in-place. If both start and end are None, the original clip is returned. If start is None, it defaults to 0. If end is None, it defaults to the end time of the original clip. Example:
image_clip = ImageClip(image_path, fps=30, duration=5.0)\nimage_clip.sub_clip(start=2, end=5)\n

make_frame_array(self, t):

Gives the numpy array representation of the image at a given time.

Args: t (float): The timestamp of the frame. Returns: numpy.ndarray: The numpy array representation of the image. Raises: ValueError: If the image is not set.

make_frame_pil(self, t) -> Image.Image:

Returns the image frame at a given time.

Args: t (float): The time at which to retrieve the frame. Returns: PIL.Image.Image: The image frame at the given time. Raises: ValueError: If the image is not set.

to_video_clip(self, fps=None, duration=None):

Convert ImageClip to VideoClip

If fps or duration is not provided, it defaults to the corresponding attribute of the ImageClip instance. If those attributes are not available, a ValueError is raised.

Parameters: fps (float, optional): Frames per second of the resulting video clip. If not provided, it defaults to the fps attribute of the ImageClip instance. If that is also not available, a ValueError is raised. duration (float, optional): Duration of the resulting video clip in seconds. If not provided, it defaults to the duration attribute of the ImageClip instance. If that is also not available, a ValueError is raised. Returns: ImageSequenceClip: A VideoClip subclass instance generated from the ImageClip frames. Raises: ValueError: If fps or duration is not provided and the corresponding attribute is not available. Note: The to_video_clip method returns an instance of the ImageSequenceClip class, which is a subclass of the VideoClip Class. Example Usage:
# Example Usage\nimage_clip = ImageClip()\nvideo_clip = image_clip.to_video_clip(fps=24, duration=10.0)\nvideo_clip.sub_fx(custom_function, start_t=2, end_t=5)\n
"},{"location":"reference_manual/clips/video_clips/imageclips/#data2imageclip","title":"Data2ImageClip","text":"

class vidiopy.Data2ImageClip(data: np.ndarray | Image.Image, fps: int | float | None = None, duration: int | float | None = None)

Bases: vidiopy.ImageClip

A class representing a video clip generated from raw data (numpy array or PIL Image).

It extends the ImageClip class and allows users to create video clips from raw data, supporting either numpy arrays or PIL Images as input.

Parameters:
  • data (np.ndarray or PIL Image): The raw data to be converted into a video clip.
  • fps (int | float | None): Frames per second of the video. If not provided, it will be inherited from the parent class (ImageClip) or set to the default value.
  • duration (int | float | None): Duration of the video in seconds. If not provided, it will be inherited from the parent class (ImageClip) or set to the default value.
Attributes:
  • image (PIL Image): The PIL Image representation of the provided data.
  • size (tuple): The size (width, height) of the image.
Methods:

_import_image(self, image) -> Image.Image:

Private method to convert the provided data (numpy array or PIL Image) into a PIL Image.

Parameters: image (np.ndarray or PIL Image): The raw data to be converted. Returns: Image.Image: The PIL Image representation of the provided data. Raises: TypeError: If the input type is not supported (neither numpy array nor PIL Image). Example Usage:
# Import necessary libraries\n\n# Create a Data2ImageClip instance from a numpy array\ndata_array = np.random.randint(0, 255, size=(480, 640, 3), dtype=np.uint8)\nvideo_clip = Data2ImageClip(data=data_array, fps=30, duration=5)\n\n# Create a Data2ImageClip instance from a PIL Image\nfrom PIL import Image\ndata_image = Image.new('RGB', (640, 480), color='red')\nvideo_clip = Data2ImageClip(data=data_image, fps=24, duration=10)\n
Note:

The Data2ImageClip class extends the ImageClip. It allows users to create video clips from raw data, supporting either numpy arrays or PIL Images as input.

"},{"location":"reference_manual/clips/video_clips/imageclips/#colorclip","title":"ColorClip","text":"

class vidiopy.ColorClip(color: str | tuple[int, ...], mode=\"RGBA\", size=(1, 1), fps=None, duration=None)

Bases: vidiopy.Data2ImageClip

A video clip class with a solid color.

It extends the Data2ImageClip class and allows users to create video clips with a solid color.

Parameters:
  • color: str | tuple[int, ...]: Color of the image. It can be a color name (e.g., 'red', 'blue') or RGB tuple.

    Available Color Names
    • aliceblue: \"#f0f8ff\",
    • antiquewhite: \"#faebd7\",
    • aqua: \"#00ffff\",
    • aquamarine: \"#7fffd4\",
    • azure: \"#f0ffff\",
    • beige: \"#f5f5dc\",
    • bisque: \"#ffe4c4\",
    • black: \"#000000\",
    • blanchedalmond: \"#ffebcd\",
    • blue: \"#0000ff\",
    • blueviolet: \"#8a2be2\",
    • brown: \"#a52a2a\",
    • burlywood: \"#deb887\",
    • cadetblue: \"#5f9ea0\",
    • chartreuse: \"#7fff00\",
    • chocolate: \"#d2691e\",
    • coral: \"#ff7f50\",
    • cornflowerblue: \"#6495ed\",
    • cornsilk: \"#fff8dc\",
    • crimson: \"#dc143c\",
    • cyan: \"#00ffff\",
    • darkblue: \"#00008b\",
    • darkcyan: \"#008b8b\",
    • darkgoldenrod: \"#b8860b\",
    • darkgray: \"#a9a9a9\",
    • darkgrey: \"#a9a9a9\",
    • darkgreen: \"#006400\",
    • darkkhaki: \"#bdb76b\",
    • darkmagenta: \"#8b008b\",
    • darkolivegreen: \"#556b2f\",
    • darkorange: \"#ff8c00\",
    • darkorchid: \"#9932cc\",
    • darkred: \"#8b0000\",
    • darksalmon: \"#e9967a\",
    • darkseagreen: \"#8fbc8f\",
    • darkslateblue: \"#483d8b\",
    • darkslategray: \"#2f4f4f\",
    • darkslategrey: \"#2f4f4f\",
    • darkturquoise: \"#00ced1\",
    • darkviolet: \"#9400d3\",
    • deeppink: \"#ff1493\",
    • deepskyblue: \"#00bfff\",
    • dimgray: \"#696969\",
    • dimgrey: \"#696969\",
    • dodgerblue: \"#1e90ff\",
    • firebrick: \"#b22222\",
    • floralwhite: \"#fffaf0\",
    • forestgreen: \"#228b22\",
    • fuchsia: \"#ff00ff\",
    • gainsboro: \"#dcdcdc\",
    • ghostwhite: \"#f8f8ff\",
    • gold: \"#ffd700\",
    • goldenrod: \"#daa520\",
    • gray: \"#808080\",
    • grey: \"#808080\",
    • green: \"#008000\",
    • greenyellow: \"#adff2f\",
    • honeydew: \"#f0fff0\",
    • hotpink: \"#ff69b4\",
    • indianred: \"#cd5c5c\",
    • indigo: \"#4b0082\",
    • ivory: \"#fffff0\",
    • khaki: \"#f0e68c\",
    • lavender: \"#e6e6fa\",
    • lavenderblush: \"#fff0f5\",
    • lawngreen: \"#7cfc00\",
    • lemonchiffon: \"#fffacd\",
    • lightblue: \"#add8e6\",
    • lightcoral: \"#f08080\",
    • lightcyan: \"#e0ffff\",
    • lightgoldenrodyellow: \"#fafad2\",
    • lightgreen: \"#90ee90\",
    • lightgray: \"#d3d3d3\",
    • lightgrey: \"#d3d3d3\",
    • lightpink: \"#ffb6c1\",
    • lightsalmon: \"#ffa07a\",
    • lightseagreen: \"#20b2aa\",
    • lightskyblue: \"#87cefa\",
    • lightslategray: \"#778899\",
    • lightslategrey: \"#778899\",
    • lightsteelblue: \"#b0c4de\",
    • lightyellow: \"#ffffe0\",
    • lime: \"#00ff00\",
    • limegreen: \"#32cd32\",
    • linen: \"#faf0e6\",
    • magenta: \"#ff00ff\",
    • maroon: \"#800000\",
    • mediumaquamarine: \"#66cdaa\",
    • mediumblue: \"#0000cd\",
    • mediumorchid: \"#ba55d3\",
    • mediumpurple: \"#9370db\",
    • mediumseagreen: \"#3cb371\",
    • mediumslateblue: \"#7b68ee\",
    • mediumspringgreen: \"#00fa9a\",
    • mediumturquoise: \"#48d1cc\",
    • mediumvioletred: \"#c71585\",
    • midnightblue: \"#191970\",
    • mintcream: \"#f5fffa\",
    • mistyrose: \"#ffe4e1\",
    • moccasin: \"#ffe4b5\",
    • navajowhite: \"#ffdead\",
    • navy: \"#000080\",
    • oldlace: \"#fdf5e6\",
    • olive: \"#808000\",
    • olivedrab: \"#6b8e23\",
    • orange: \"#ffa500\",
    • orangered: \"#ff4500\",
    • orchid: \"#da70d6\",
    • palegoldenrod: \"#eee8aa\",
    • palegreen: \"#98fb98\",
    • paleturquoise: \"#afeeee\",
    • palevioletred: \"#db7093\",
    • papayawhip: \"#ffefd5\",
    • peachpuff: \"#ffdab9\",
    • peru: \"#cd853f\",
    • pink: \"#ffc0cb\",
    • plum: \"#dda0dd\",
    • powderblue: \"#b0e0e6\",
    • purple: \"#800080\",
    • rebeccapurple: \"#663399\",
    • red: \"#ff0000\",
    • rosybrown: \"#bc8f8f\",
    • royalblue: \"#4169e1\",
    • saddlebrown: \"#8b4513\",
    • salmon: \"#fa8072\",
    • sandybrown: \"#f4a460\",
    • seagreen: \"#2e8b57\",
    • seashell: \"#fff5ee\",
    • sienna: \"#a0522d\",
    • silver: \"#c0c0c0\",
    • skyblue: \"#87ceeb\",
    • slateblue: \"#6a5acd\",
    • slategray: \"#708090\",
    • slategrey: \"#708090\",
    • snow: \"#fffafa\",
    • springgreen: \"#00ff7f\",
    • steelblue: \"#4682b4\",
    • tan: \"#d2b48c\",
    • teal: \"#008080\",
    • thistle: \"#d8bfd8\",
    • tomato: \"#ff6347\",
    • turquoise: \"#40e0d0\",
    • violet: \"#ee82ee\",
    • wheat: \"#f5deb3\",
    • white: \"#ffffff\",
    • whitesmoke: \"#f5f5f5\",
    • yellow: \"#ffff00\",
    • yellowgreen: \"#9acd32\",
  • mode: str: Mode to use for the image. Default is 'RGBA'.

  • size: tuple: Size of the image in pixels (width, height). Default is (1, 1) for changing size afterwards.
  • fps: float, optional: Frames per second for the video clip.
  • duration: float, optional: Duration of the video clip in seconds.
Attributes:
  • color: str | tuple[int, ...]: The color of the video clip.
  • mode: str: The mode of the video clip.
  • Other attributes are inherited from the Data2ImageClip class.
Methods:

set_size(self, size: tuple[int, int]):

Set the size of the video clip.

Parameters: size: tuple[int, int]: New size of the video clip in pixels (width, height). Example Usage:
color_clip.set_size((800, 600))\n
Example Usage:
# Create a red square video clip (500x500, 30 FPS, 5 seconds):\nred_square = ColorClip(color='red', size=(500, 500), fps=30, duration=5)\n\n# Create a blue fullscreen video clip (1920x1080, default FPS and duration):\nblue_fullscreen = ColorClip(color='blue', size=(1920, 1080))\n\n# Create a green transparent video clip (RGBA mode, 800x600):\ngreen_transparent = ColorClip(color=(0, 255, 0, 0), mode='RGBA', size=(800, 600))\n
"},{"location":"reference_manual/clips/video_clips/imageclips/#textclip","title":"TextClip","text":"class vidiopy.TextClip(text: str, font_pth: None | str = None, font_size: int = 20, txt_color: str | tuple[int, ...] = (255, 255, 255, 0), bg_color: str | tuple[int, ...] = (0, 0, 0, 0), fps=None, duration=None)

Bases: vidiopy.Data2ImageClip

A class representing a text clip to be used in video compositions.

Parameters:
  • text (str): The text content to be displayed in the clip.
  • font_pth (None | str, optional): The file path to the TrueType font file (.ttf). If None, the default system font is used. Defaults to None.
  • font_size (int, optional): The font size for the text. Defaults to 20.
  • txt_color (str | tuple[int, ...], optional): The color of the text specified as either a string (e.g., 'white') or a tuple representing RGBA values. Defaults to (255, 255, 255, 0) (fully transparent white).
  • bg_color (str | tuple[int, ...], optional): The background color of the text clip, specified as either a string (e.g., 'black') or a tuple representing RGBA values. Defaults to (0, 0, 0, 0) (fully transparent black).
  • fps (float, optional): Frames per second of the video. If None, the value is inherited from the parent class. Defaults to None.
  • duration (float, optional): Duration of the video clip in seconds. If None, the value is inherited from the parent class. Defaults to None.
Attributes:
  • font (PIL.ImageFont.FreeTypeFont): The font object used for rendering the text.
  • image (PIL.Image.Image): The image containing the rendered text.
  • fps (float): Frames per second of the video clip.
  • duration (float): Duration of the video clip in seconds.
  • Other attributes are inherited from the Data2ImageClip class.
Example Usage:
# Create a TextClip with custom text and styling\ntext_clip = TextClip(\"Contribute to Vidiopy\", font_size=30, txt_color='red', bg_color='blue', fps=24, duration=5.0)\n\n# Use the text clip in a video composition\ncomposition = CompositeVideoClip([other_clip, text_clip])\ncomposition.write_videofile(\"output.mp4\", codec='libx264', fps=24)\n
"},{"location":"reference_manual/clips/video_clips/imagesequenceclip/","title":"ImageSequenceClip","text":"

class vidiopy.VideoClip.ImageSequenceClip

Bases: vidiopy.VideoClip.VideoClip

A class used to represent a sequence of images as a video clip. This class extends the VideoClip class and provides additional functionality for handling sequences of images.

Attributes:
  • clip (tuple[Image.Image, ...]): The sequence of images as a tuple of PIL Images.

  • It inherits all the attributes from the VideoClip class.

Parameters:
  • sequence (str | Path | tuple[Image.Image, ...] | tuple[np.ndarray, ...] | tuple[str | Path, ...]): The sequence to import. It can be a tuple of PIL Images, paths to images, numpy arrays, or a path to a directory.
  • fps (int | float | None, optional): The frames per second of the image sequence clip. If not specified, it is calculated from the duration and the number of images in the sequence.
  • duration (int | float | None, optional): The duration of the image sequence clip in seconds. If not specified, it is calculated from the fps and the number of images in the sequence.
  • audio (optional): The audio of the image sequence clip. If not specified, the image sequence clip will have no audio.
Methods:

make_frame_array(t)

Generates a numpy array representation of a specific frame in the image sequence clip.

This method calculates the index of the frame for a specific time, retrieves the frame from the image sequence clip, and converts it to a numpy array.

Parameters: - t (int | float): The time of the frame to convert.

Returns: - np.ndarray: The numpy array representation of the frame.

Requires: - duration or end to be set.

make_frame_pil(t)

Generates a PIL Image representation of a specific frame in the image sequence clip.

This method calculates the index of the frame for a specific time, retrieves the frame from the image sequence clip, and returns it as a PIL Image.

Parameters: - t (int | float): The time of the frame to convert.

Returns: - Image.Image: The PIL Image representation of the frame.

Raises: - ValueError: If neither the duration nor the end of the image sequence clip is set.

Requires: - duration or end to be set.

fl_frame_transform(func, *args, **kwargs)

Applies a function to each frame of the image sequence clip.

This method iterates over each frame in the image sequence clip, applies a function to it, and replaces the original frame with the result. The function is expected to take a PIL Image as its first argument and return a PIL Image.

Parameters: - func (Callable[..., Image.Image]): The function to apply to each frame. It should take a PIL Image as its first argument and return a PIL Image. - *args: Additional positional arguments to pass to the function. - **kwargs: Additional keyword arguments to pass to the function.

Returns: - ImageSequenceClip: The current instance of the ImageSequenceClip class.

Example:

>>> image_sequence_clip = ImageSequenceClip()\n>>> image_sequence_clip.fl_frame_transform(lambda frame: frame.rotate(90))\n

fl_clip_transform(func, *args, **kwargs)

Applies a function to each frame of the image sequence clip along with its timestamp.

This method iterates over each frame in the image sequence clip, applies a function to it and its timestamp, and replaces the original frame with the result. The function is expected to take a PIL Image and a float as its first two arguments and return a PIL Image.

Parameters: - func (Callable[..., Image.Image]): The function to apply to each frame. It should take a PIL Image and a float as its first two arguments and return a PIL Image. - *args: Additional positional arguments to pass to the function. - **kwargs: Additional keyword arguments to pass to the function.

Returns: - ImageSequenceClip: The current instance of the ImageSequenceClip class.

Raises: - ValueError: If the fps of the image sequence clip is not set.

Requires: - fps to be set.

Example:

>>> image_sequence_clip = ImageSequenceClip()\n>>> image_sequence_clip.fl_clip_transform(lambda frame, t: frame.rotate(90 * t))\n

"},{"location":"reference_manual/clips/video_clips/mixing_clips/","title":"CompositeVideoCLip","text":"

def composite_videoclips(clips: Sequence[VideoClip], fps: int | float | None = None, bg_color: tuple[int, ...] = (0, 0, 0, 0), use_bg_clip: bool = False, audio: bool = True, audio_fps=44100)

Composites multiple video clips into a single video clip.

This function takes a sequence of video clips and composites them into a single video clip. The clips are layered on top of each other in the order they appear in the sequence. The background of the composite clip can be a solid color or the first clip in the sequence. The function also handles the positioning of each clip in the composite clip and the audio of the composite clip.

Args:
  • clips: Sequence[VideoClip]: The sequence of video clips to composite.
  • fps (int | float | None, optional): The frames per second of the composite clip. If not specified, it is set to the maximum fps of the clips in the sequence or raises a ValueError if none of the clips have fps set.
  • bg_color (tuple[int, ...], optional): The background color of the composite clip as a tuple of integers representing RGBA values. Default is (0, 0, 0, 0) which is transparent.
  • use_bg_clip (bool, optional): Whether to use the first clip in the sequence as the background of the composite clip. Default is False.
  • audio (bool, optional): Whether to include audio in the composite clip. If True, the audio of the clips in the sequence is also composited. Default is True.
  • audio_fps (int, optional): The frames per second of the audio of the composite clip. Default is 44100.
Returns: ImageSequenceClip: The composite video clip as an instance of the ImageSequenceClip class. Raises:
  • ValueError: If neither fps nor duration is set for any of the clips in the sequence.
  • ValueError: If the position of a clip in the composite clip is not specified correctly.
  • TypeError: If the position of a clip in the composite clip is not of the correct type.
Example:
>>> clip1 = VideoClip(...)\n>>> clip2 = VideoClip(...)\n>>> composite_clip = composite_videoclips([clip1, clip2], fps=24)\n
Note: This function uses the ImageSequenceClip class to create the composite video clip and the composite_audioclips function to composite the audio of the clips."},{"location":"reference_manual/clips/video_clips/mixing_clips/#concatenatevideoclips","title":"ConcatenateVideoClips","text":"

def concatenate_videoclips(clips: Sequence[VideoClip], transparent: bool = False, fps: int | float | None = None, scaling_strategy: str = \"scale_same\", transition: ( VideoClip | Callable[[Image.Image, Image.Image, int | float], VideoClip] | None ) = None, audio: bool = True, audio_fps: int | None = None):

Concatenates multiple video clips into a single video clip.

This function takes a sequence of video clips and concatenates them into a single video clip. The clips are appended one after the other in the order they appear in the sequence. The function also handles the scaling of each clip in the concatenated clip and the audio of the concatenated clip.

Args:
  • clips (Sequence[VideoClip]): The sequence of video clips to concatenate.
  • transparent (bool, optional): Whether to use a transparent background for the concatenated clip. Default is False.
  • fps (int | float | None, optional): The frames per second of the concatenated clip. If not specified, it is set to the maximum fps of the clips in the sequence or raises a ValueError if none of the clips have fps set.
  • scaling_strategy (str, optional): The scaling strategy to use for the clips in the concatenated clip. If 'scale_up', the clips are scaled up to fit the size of the concatenated clip. If 'scale_down', the clips are scaled down to fit the size of the concatenated clip. If 'scale_same', the clips are not scaled. Default is 'scale_same'.
  • transition (VideoClip | Callable[[Image.Image, Image.Image, int | float], VideoClip] | None, optional): The transition to use between the clips in the concatenated clip. If a VideoClip, it is used as the transition. If a callable, it is called with the last frame of the previous clip, the first frame of the next clip, and the duration of the transition to generate the transition. If None, no transition is used. Default is None.
  • audio (bool, optional): Whether to include audio in the concatenated clip. If True, the audio of the clips in the sequence is also concatenated. Default is True.
  • audio_fps (int | None, optional): The frames per second of the audio of the concatenated clip. Default is None.
Returns: ImageSequenceClip: The concatenated video clip as an instance of the ImageSequenceClip class.

Raises: - ValueError: If neither fps nor duration is set for any of the clips in the sequence. - ValueError: If the size of a clip in the concatenated clip is not specified correctly. - TypeError: If the scaling strategy of a clip in the concatenated clip is not of the correct type.

Example:
>>> clip1 = VideoClip(...)\n>>> clip2 = ImageClip(...)\n>>> concatenated_clip = concatenate_videoclips([clip1, clip2], fps=24)\n
Note: This function uses the ImageSequenceClip class to create the concatenated video clip and the concatenate_audioclips function to concatenate the audio of the clips."},{"location":"reference_manual/clips/video_clips/videoclip/","title":"VideoClip","text":"

class vidiopy.VideoClip.VideoClip

Base: vidiopy.Clip.Clip

A VideoClip is a Base Class for all Video And Image clips (VideoFileClip, ImageClip and ImageSequenceClip)

See VideoFileClip, ImageClip etc. for more user-friendly classes.

Attributes:

_st: float | int

The start time of the clip (in seconds).

_ed: float | int | None

The end time of the clip (in seconds).

_dur: float | int | None

The Duration of the clip (in seconds).

Warning: Not Real Duration

It may not be equal to video.end - video.start. It is the original duration with which the video was imported (or otherwise set).

fps: float | int | None

The FPS(Frame per Second) of the Video.

size: tuple[int, int]

The size of the clip, (width,height), in pixels.

audio: AudioClip | None

Audio in the Video.

pos: Callable[[float | int], tuple[int | str | float, int | str | float]]

A function t->(x,y) where x,y is the position of the clip when it is composed with other clips. See VideoClip.set_pos for more details.

relative_pos: bool

A bool which determines whether pos outputs a relative position or a position in pixels. Properties:

start: float | int

The start time of the clip (in seconds).

end: float | int | None

The end time of the clip (in seconds).

duration: float | int | None

The Duration of the clip (in seconds).

Warning: Not Real Duration

It may not be equal to video.end - video.start. It is the original duration with which the video was imported (or otherwise set).

width | w: int

The width of the clip, in pixels.

height | h: int

The height of the clip, in pixels.

aspect_ratio: Fraction

The aspect ratio of the clip, (width / height). methods:

set_start(self, value: int | float) -> VideoClip

The set_start method is used to set the start time of the video clip. It Changes _st attribute of the VideoClip.

Args: value: int | float: The start time of the video clip. Returns: VideoClip: The instance of the VideoClip after setting the start time.

set_end(self, value: int | float) -> VideoClip

The set_end method is used to set the end time of the video clip. It Changes _ed attribute of the VideoClip.

Args: value: int | float: The end time of the video clip. Returns: VideoClip: The instance of the VideoClip after setting the end time.

set_duration(self, value: int | float) -> VideoClip

Setter for the duration of the video clip. it raises a ValueError since duration is not allowed to be set. but you can change the duration using clip._dur = value or the _set_duration method.

Args: dur: int | float: The duration to set for the video clip. Returns: NoReturn: Raises a ValueError since duration is not allowed to be set. Raises: ValueError: If an attempt is made to set the duration, a ValueError is raised.

_set_duration(self, value: int | float) -> VideoClip

Private method to set the duration of the video clip. It Changes _dur attribute of the VideoClip.

Args: value: int | float: The duration to set for the video clip. Returns: VideoClip: The instance of the VideoClip after setting the duration.

set_position(self, pos: (tuple[int | float | str, int | float | str] | list[int | float | str] | Callable[[float | int], tuple[int | float | str, int | float | str]]), relative=False) -> Self:

Sets the position of the video clip. This is useful for the concatenate method, where the position of the video clip is used to set it on other clip. This method allows the position of the video clip to be set either as a fixed tuple of coordinates, or as a function that returns a tuple of coordinates at each time. The position can be set as absolute or relative to the size of the clip using the relative.

Note:
  • It Should Be the coordinates of the Video on the top left corner.
  • If relative is True, the position should be between the 0.0 & 1.0.
  • If relative is False, the position should be between the 0 & width or height of the video.
Parameters: pos: tuple | Callable: The position to set for the video clip. This can be either:
  • a tuple of two integers or floats, representing the x and y coordinates of the position, or
  • a callable that takes a single float or integer argument (representing the time) and returns a tuple of two integers or floats, representing the x and y coordinates of the position.
relative (bool, optional): Whether the position is relative to the size of the clip. If True, the position is interpreted as a fraction of the clip's width and height. Defaults to False. Raises: TypeError: If pos is not a tuple or a callable. Returns: self: Returns the instance of the class.

set_audio(self, audio: AudioClip | None) -> Self:

Sets the audio for the video clip.

This method assigns the provided audio clip to the video clip. If the audio clip is not None, it also sets the start and end times of the audio clip to match the video clip's start and end times.

Parameters: audio: AudioClip | None: The audio clip to be set to the video clip. If None, no audio is set. Returns: Self: Returns the instance of the class with updated audio clip.

without_audio(self) -> Self:

Removes the audio from the current VideoClip instance.

This method sets the 'audio' attribute of the VideoClip instance to None, effectively removing any audio that the clip might have.

Returns: VideoClip: The same instance of the VideoClip but without any audio. This allows for method chaining. Example:
>>> clip = VideoClip(...)\n>>> clip_without_audio = clip.without_audio()\n
Note: This method modifies the VideoClip instance in-place. If you want to keep the original clip with audio, consider making a copy before calling this method.

set_fps(self, fps: int | float) -> Self:

Set the frames per second (fps) for the video clip.

This method allows you to set the fps for the video clip. The fps value determines how many frames are shown per second during playback. A higher fps value results in smoother video playback.

Parameters: fps: int | float: The frames per second value to set. This can be an integer or a float. For example, a value of 24 would mean 24 frames are shown per second. Raises: TypeError: If the provided fps value is not an integer or a float. Returns: Self: Returns the instance of the class, allowing for method chaining. Example:
>>> clip = VideoClip()\n>>> clip.set_fps(24)\n

make_frame_array(self, t) -> np.ndarray:

Generate a frame at time t as a NumPy array.

This method is intended to be overridden in subclasses. It should return a NumPy array representing the frame at the given time.

Parameters: t: float: The time at which to generate the frame. Raises: NotImplementedError: If the method is not overridden in a subclass. Returns: np.ndarray: A NumPy array representing the frame at time t. Example:
>>> clip = VideoClipSubclass()\n>>> frame = clip.make_frame_array(0.5)\n

make_frame_pil(self, t) -> Image.Image:

Generate a frame at time t as a PIL Image.

This method is intended to be overridden in subclasses. It should return a PIL Image representing the frame at the given time.

Parameters: t: float: The time at which to generate the frame. Raises: NotImplementedError: If the method is not overridden in a subclass. Returns: Image.Image: A PIL Image representing the frame at time t. Example:
>>> clip = VideoClipSubclass()\n>>> frame = clip.make_frame_pil(0.5)\n

get_frame(self, t: int | float, is_pil=None) -> np.ndarray | Image.Image:

Get a frame at time t.

This method returns a frame at the given time t. The frame can be returned as a NumPy array or a PIL Image, depending on the value of is_pil.

Parameters: t: int | float: The time at which to get the frame. is_pil (bool, optional): If True, the frame is returned as a PIL Image. If False or None, the frame is returned as a NumPy array. Defaults to None. Raises: ValueError: If is_pil is not True, False, or None. Returns: np.ndarray | Image.Image: The frame at time t as a NumPy array or a PIL Image. Example:
>>> clip = VideoClip()\n>>> frame_array = clip.get_frame(0.5)\n>>> frame_pil = clip.get_frame(0.5, is_pil=True)\n

iterate_frames_pil_t(self, fps: int | float) -> Generator[Image.Image, Any, None]:

Iterate over frames as PIL Images at a given frames per second (fps).

This method generates frames at a given fps as PIL Images. The frames are generated from the start of the clip to the end or duration, whichever is set.

Parameters: fps: int | float: The frames per second at which to generate frames. Raises: ValueError: If neither end nor duration is set. Yields: Image.Image: The next frame as a PIL Image. Example:
>>> clip = VideoClip()\n>>> for frame in clip.iterate_frames_pil_t(24):\n...     # Do something with frame\n

iterate_frames_array_t(self, fps: int | float) -> Generator[np.ndarray, Any, None]:

Iterate over frames as NumPy arrays at a given frames per second (fps).

This method generates frames at a given fps as NumPy arrays. The frames are generated from the start of the clip to the end or duration, whichever is set.

Parameters: fps: int | float: The frames per second at which to generate frames. Raises: ValueError: If neither end nor duration is set. Yields: np.ndarray: The next frame as a NumPy array. Example:
>>> clip = VideoClip()\n>>> for frame in clip.iterate_frames_array_t(24):\n...     # Do something with frame\n

sub_clip_copy(self, t_start: int | float | None = None, t_end: int | float | None = None) -> Self:

Returns a subclip of a copy of the clip, starting at time t_start (in seconds).

Parameters: t_start: int | float | None, optional: The start time of the subclip in seconds. Defaults to None. t_end: int | float | None, optional: The end time of the subclip in seconds. Defaults to None. Returns: Self: The subclip of the clip. Raises: NotImplementedError: If the method is not overridden in a subclass. Example:
>>> clip = VideoClip()\n>>> subclip = clip.sub_clip_copy(t_start=1.5, t_end=3.5)\n

sub_clip(self, t_start: int | float | None = None, t_end: int | float | None = None) -> Self:

Returns a subclip of the clip, starting at time t_start and ending at time t_end.

Parameters: t_start: int | float | None, optional: The start time of the subclip in seconds. Defaults to None. t_end: int | float | None, optional: The end time of the subclip in seconds. Defaults to None. Returns: Self: The subclip of the clip. Raises: NotImplementedError: If the method is not overridden in a subclass. Example:
>>> clip = VideoClip()\n>>> subclip = clip.sub_clip(t_start=1.5, t_end=3.5)\n

fl_frame_transform(self, func, *args, **kwargs) -> Self:

Apply a frame transformation function to each frame of the video clip.

This method calls the provided function func on each frame of the clip and applies the transformation. The transformed frames are then stored in a list and assigned back to the clip.

Parameters: func: The frame transformation function to be applied. *args: Additional positional arguments to be passed to the transformation function. **kwargs: Additional keyword arguments to be passed to the transformation function. Returns: Self: The modified video clip object. Example:
>>> def grayscale(frame):\n>>>     # Convert frame to grayscale\n>>>     return cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n>>>\n>>> clip = VideoClip()\n>>> clip.fl_frame_transform(grayscale)\n
Note: This method is meant to be overridden in the subclass. If not overridden, it raises a NotImplementedError. The transformation function func should accept a single frame as the first argument and return the transformed frame.

fl_time_transform(self, func_t: Callable[[int | float], int | float]) -> Self:

Apply a time transformation function to the clip.

This method modifies the make_frame_array and make_frame_pil methods to apply a time transformation function func_t to the time t before generating the frame. This can be used to speed up, slow down, or reverse the clip, among other things.

If the clip has audio, the same time transformation is applied to the audio.

Parameters: func_t (Callable[[int | float], int | float]): The time transformation function to apply. This function should take a time t and return a new time. Returns: Self: Returns the instance of the class, allowing for method chaining. Example:
>>> clip = VideoClip()\n>>> clip.fl_time_transform(lambda t: 2*t)  # Speed up the clip by a factor of 2\n

fx(self, func: Callable[..., Self], *args, **kwargs) -> Self:

Apply an effect function to the clip.

This method applies an effect function func to the clip. The effect function should take the clip as its first argument, followed by any number of positional and keyword arguments.

The effect function should return a new clip, which is then returned by this method.

Parameters: func (Callable[..., Self]): The effect function to apply. This function should take the clip as its first argument, followed by any number of positional and keyword arguments. *args: Positional arguments to pass to the effect function. **kwargs: Keyword arguments to pass to the effect function. Returns: Self: The new clip returned by the effect function. Example:
>>> clip = VideoClip()\n>>> clip.fx(effect_function, arg1, arg2, kwarg1=value1)\n

sub_fx(self, func: Callable[..., Self], *args, start_t: int | float | None = None, end_t: int | float | None = None, **kwargs) -> Self:

Apply an effect function to a subclip of the clip.

This method creates a subclip from start_t to end_t, applies an effect function func to the subclip, and returns the modified subclip.

The effect function should take the clip as its first argument, followed by any number of positional and keyword arguments.

Parameters: func (Callable[..., Self]): The effect function to apply. This function should take the clip as its first argument, followed by any number of positional and keyword arguments. *args: Positional arguments to pass to the effect function. start_t (int | float | None, optional): The start time of the subclip. If None, the start of the clip is used. Defaults to None. end_t (int | float | None, optional): The end time of the subclip. If None, the end of the clip is used. Defaults to None. **kwargs: Keyword arguments to pass to the effect function. Returns: Self: The modified subclip. Example:
>>> clip = VideoClip()\n>>> subclip = clip.sub_fx(effect_function, arg1, arg2, start_t=1, end_t=2, kwarg1=value1)\n

_sync_audio_video_s_e_d(self) -> Self:

Synchronizes the audio and video start, end, and duration attributes.

This method is used to ensure that the audio and video parts of a clip are in sync. It sets the start, end, and original duration of the audio to match the video.

Returns: Self: Returns the instance of the class with updated audio attributes. Raises: None Example:
>>> video_clip = VideoClip()\n>>> video_clip._sync_audio_video_s_e_d()\n
Note: This is an internal method, typically not meant to be used directly by the user.

write_videofile(self, filename, fps=None, codec=None, bitrate=None, audio=True, audio_fps=44100, preset=\"medium\", pixel_format=None, audio_codec=None, audio_bitrate=None, threads=None, ffmpeg_params: dict[str, str] | None = None, logger=\"bar\", over_write_output=True) -> Self:

Writes the video clip to a file.

This method generates video frames, processes them, and writes them to a file. If audio is present in the clip, it is also written to the file.

Args: filename (str): The name of the file to write. fps (int, optional): The frames per second to use for the output video. If not provided, the fps of the video clip is used. codec (str, optional): The codec to use for the output video. bitrate (str, optional): The bitrate to use for the output video. audio (bool, optional): Whether to include audio in the output video. Defaults to True. audio_fps (int, optional): The frames per second to use for the audio. Defaults to 44100. preset (str, optional): The preset to use for the output video. Defaults to \"medium\". pixel_format (str, optional): The pixel format to use for the output video. audio_codec (str, optional): The codec to use for the audio. audio_bitrate (str, optional): The bitrate to use for the audio. threads (int, optional): The number of threads to use for writing the video file. ffmpeg_params (dict[str, str] | None, optional): Additional parameters to pass to ffmpeg. logger (str, optional): The logger to use. Defaults to \"bar\". over_write_output (bool, optional): Whether to overwrite the output file if it already exists. Defaults to True. Returns: Self: Returns the instance of the class. Raises: Exception: If fps is not provided and not set in the video clip. Example:
>>> video_clip = VideoClip()\n>>> video_clip.write_videofile(\"output.mp4\")\n
Note: This method uses ffmpeg to write the video file.

write_videofile_subclip(self, filename, start_t: int | float | None = None, end_t: int | float | None = None, fps=None, codec=None, bitrate=None, audio=True, audio_fps=44100, preset=\"medium\", pixel_format=None, audio_codec=None, audio_bitrate=None, write_logfile=False, verbose=True, threads=None, ffmpeg_params: dict[str, str] | None = None, logger=\"bar\", over_write_output=True) -> Self:

Writes a subclip of the video clip to a file.

This method generates video frames for a specific part of the video (subclip), processes them, and writes them to a file. If audio is present in the clip, it is also written to the file.

Args: filename (str): The name of the file to write. start_t (int | float | None, optional): The start time of the subclip. If not provided, the start of the video is used. end_t (int | float | None, optional): The end time of the subclip. If not provided, the end of the video is used. fps (int, optional): The frames per second to use for the output video. If not provided, the fps of the video clip is used. codec (str, optional): The codec to use for the output video. bitrate (str, optional): The bitrate to use for the output video. audio (bool, optional): Whether to include audio in the output video. Defaults to True. audio_fps (int, optional): The frames per second to use for the audio. Defaults to 44100. preset (str, optional): The preset to use for the output video. Defaults to \"medium\". pixel_format (str, optional): The pixel format to use for the output video. audio_codec (str, optional): The codec to use for the audio. audio_bitrate (str, optional): The bitrate to use for the audio. write_logfile (bool, optional): Whether to write a logfile. Defaults to False. verbose (bool, optional): Whether to print verbose output. Defaults to True. threads (int, optional): The number of threads to use for writing the video file. ffmpeg_params (dict[str, str] | None, optional): Additional parameters to pass to ffmpeg. logger (str, optional): The logger to use. Defaults to \"bar\". over_write_output (bool, optional): Whether to overwrite the output file if it already exists. Defaults to True. Returns: Self: Returns the instance of the class. Raises: Exception: If fps is not provided and not set in the video clip. Example:
>>> video_clip = VideoClip()\n>>> video_clip.write_videofile_subclip(\"output.mp4\", start_t=10, end_t=20)\n
Note: This method uses ffmpeg to write the video file.

write_image_sequence(self, nformat: str, fps: int | float | None = None, dir=\".\") -> Self:

Writes the frames of the video clip as an image sequence.

This method generates video frames, processes them, and writes them as images to a directory. The images are named by their frame number and the provided format.

Args: nformat (str): The format to use for the output images. fps (int | float | None, optional): The frames per second to use for the output images. If not provided, the fps of the video clip is used. dir (str, optional): The directory to write the images to. Defaults to the current directory. Returns: Self: Returns the instance of the class. Raises: ValueError: If fps is not provided and fps and duration are not set in the video clip. Example:
>>> video_clip = VideoClip()\n>>> video_clip.write_image_sequence(\"png\", fps=24, dir=\"frames\")\n
Note: This method uses ffmpeg to write the images.

save_frame(self, t: int | float, filename: str) -> Self:

Saves a specific frame of the video clip as an image.

This method generates a video frame for a specific time, processes it, and writes it as an image to a file.

Args: t (int | float): The time of the frame to save. filename (str): The name of the file to write. Returns: Self: Returns the instance of the class. Example:
>>> video_clip = VideoClip()\n>>> video_clip.save_frame(10, \"frame10.png\")\n
Note: This method uses ffmpeg to write the image.

to_ImageClip(self, t: int | float):

Converts a specific frame of the video clip to an ImageClip.

This method generates a video frame for a specific time, processes it, and converts it to an ImageClip.

Args: t (int | float): The time of the frame to convert. Returns: Data2ImageClip: The converted ImageClip. Raises: None Example:
>>> video_clip = VideoClip()\n>>> image_clip = video_clip.to_ImageClip(10)\n
Note: This method uses ffmpeg to generate the frame and then converts it to an ImageClip."},{"location":"reference_manual/clips/video_clips/videofileclip/","title":"VideoFileClip","text":"

class vidiopy.VideoFileClip(filename: str, audio: bool = True, ffmpeg_options: dict | None = None)

Bases: vidiopy.VideoClip

All Methods and properties of the VideoClip class are available.

A video clip originating from a Video file.

Parameters:

filename: str

The name of the video file, as a string or a path-like object. It can have any extension supported by ffmpeg. All supported extensions
  • .mp4
  • .avi
  • .mov
  • .mkv
  • .webm
  • .flv
  • .wmv
  • .3gp
  • .ogg
  • .ogv
  • .mts
  • .m2ts
  • .ts
  • .vob
  • .mpg
  • .mpeg
  • .m2v
  • .m4v
  • .mxf
  • .dv
  • .f4v
  • .gif
  • .mp3
  • .wav
  • .flac
  • .ogg
  • .m4a
  • .wma
  • .aac
  • .ac3
  • .alac
  • .aiff
  • .amr
  • .au
  • .mka
  • .mp2
  • .mpa
  • .opus
  • .ra
  • .tta
  • .wv
  • .weba
  • .webm
  • .webvtt
  • .srt ETC.

audio: bool Default: True

Set to False if the clip doesn\u2019t have any audio or if you do not wish to read the audio.

ffmpeg_options: dict | None Default: None

A dictionary of options to be passed to ffmpeg when generating the clip\u2019s audio. If None, the default options will be used. If you want to pass options to the video part of the clip, you will have to use the vidiopy.VideoFileClip.set_make_frame method. Attributes:

clip:

The Numpy array of the clip\u2019s video frames.

Read docs for Clip() and VideoClip() for other, more generic, attributes.

Methods:

fl_frame_transform(self, func, *args, **kwargs) -> Self:

Applies a function to each frame of the video clip.

This method iterates over each frame in the video clip, applies a function to it, and replaces the original frame with the result.

Args: func (callable): The function to apply to each frame. It should take an Image as its first argument, and return an Image. *args: Additional positional arguments to pass to func. **kwargs: Additional keyword arguments to pass to func. Returns: Self: Returns the instance of the class with updated frames. Raises: None Example:
>>> video_clip = VideoClip()\n>>> def invert_colors(image):\n...     return ImageOps.invert(image)\n>>> video_clip.fl_frame_transform(invert_colors)\n
Note: This method requires the start and end of the video clip to be set.

fl_clip_transform(self, func, *args, **kwargs) -> Self:

Applies a function to each frame of the video clip along with its timestamp.

This method iterates over each frame in the video clip, applies a function to it and its timestamp, and replaces the original frame with the result.

Args: func (callable): The function to apply to each frame. It should take an Image and a float (representing the timestamp) as its first two arguments, and return an Image. *args: Additional positional arguments to pass to func. **kwargs: Additional keyword arguments to pass to func. Returns: Self: Returns the instance of the class with updated frames. Raises: None Example:
>>> video_clip = VideoClip()\n>>> def add_timestamp(image, timestamp):\n...     draw = ImageDraw.Draw(image)\n...     draw.text((10, 10), str(timestamp), fill=\"white\")\n...     return image\n>>> video_clip.fl_clip_transform(add_timestamp)\n
Note: This method requires the fps of the video clip to be set.

make_frame_array(self, t: int | float) -> np.ndarray:

Generates a numpy array representation of a specific frame in the video clip.

This method calculates the index of the frame for a specific time, retrieves the frame from the video clip, and converts it to a numpy array.

Args: t (int | float): The time of the frame to convert. Returns: np.ndarray: The numpy array representation of the frame. Raises: ValueError: If the duration of the video clip is not set. Example:
>>> video_clip = VideoClip()\n>>> frame_array = video_clip.make_frame_array(10)\n
Note: This method requires the duration of the video clip to be set.

make_frame_pil(self, t: int | float) -> Image.Image:

Generates a PIL Image representation of a specific frame in the video clip.

This method calculates the index of the frame for a specific time, retrieves the frame from the video clip, and returns it as a PIL Image.

Args: t (int | float): The time of the frame to convert. Returns: Image.Image: The PIL Image representation of the frame. Raises: ValueError: If the duration of the video clip is not set. Example:
>>> video_clip = VideoClip()\n>>> frame_image = video_clip.make_frame_pil(10)\n
Note: This method requires the duration of the video clip to be set.

_import_video_clip(self, file_name: str, ffmpeg_options: dict | None = None) -> tuple:

Imports a video clip from a file using ffmpeg.

This method reads a video file using ffmpeg, converts each frame to a PIL Image, and returns a tuple of the images and the fps of the video.

Args: file_name (str): The name of the video file to import. ffmpeg_options (dict | None, optional): Additional options to pass to ffmpeg. Defaults to None. Returns: tuple: A tuple of the frames as PIL Images and the fps of the video. Raises: None Example:
>>> video_clip = VideoClip()\n>>> frames, fps = video_clip._import_video_clip(\"video.mp4\")\n
Note: This method uses ffmpeg to read the video file. It is a private method and not intended for external use."}]} \ No newline at end of file diff --git a/sitemap.xml b/sitemap.xml new file mode 100644 index 0000000..4b7379d --- /dev/null +++ b/sitemap.xml @@ -0,0 +1,103 @@ + + + + https://github.com/SohamTilekar/vidiopy/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/getting_started/basic_concepts/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/getting_started/download_install/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/getting_started/mixing_clip/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/getting_started/quick_presentation/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/getting_started/read%26write/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/more/CONTRIBUTING/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/more/code%20style%20guide/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/reference_manual/reference_manual/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/reference_manual/clips/clip/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/reference_manual/clips/audio_clips/audioarrayclip/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/reference_manual/clips/audio_clips/audioclip/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/reference_manual/clips/audio_clips/audiofileclip/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/reference_manual/clips/audio_clips/mixingaudio/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/reference_manual/clips/audio_clips/silenceclip/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/reference_manual/clips/video_clips/imageclips/ + 2024-04-12 + daily + + + 
https://github.com/SohamTilekar/vidiopy/reference_manual/clips/video_clips/imagesequenceclip/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/reference_manual/clips/video_clips/mixing_clips/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/reference_manual/clips/video_clips/videoclip/ + 2024-04-12 + daily + + + https://github.com/SohamTilekar/vidiopy/reference_manual/clips/video_clips/videofileclip/ + 2024-04-12 + daily + + \ No newline at end of file diff --git a/sitemap.xml.gz b/sitemap.xml.gz new file mode 100644 index 0000000..e864f49 Binary files /dev/null and b/sitemap.xml.gz differ diff --git a/stylesheets/extra.css b/stylesheets/extra.css new file mode 100644 index 0000000..384fb1d --- /dev/null +++ b/stylesheets/extra.css @@ -0,0 +1,33 @@ +[data-md-color-scheme="slate"] { + --md-code-hl-number-color: #B5CEA8; + --md-code-hl-special-color: #DCDCAA; + --md-code-hl-function-color: #DCDCAA; + --md-code-hl-constant-color: #569CD6; + --md-code-hl-keyword-color: #C586C0; + --md-code-hl-string-color: #CE9178; + --md-code-hl-name-color: #9CDCFE; + --md-code-hl-operator-color: #D4D4D4; + --md-code-hl-punctuation-color: #D4D4D4; + --md-code-hl-comment-color: #6A9955; + --md-code-hl-generic-color: #9CDCFE; + --md-code-hl-variable-color: #9CDCFE; +} + +[data-md-color-scheme="default"] { + --md-code-hl-number-color: #007acc; /* Darker blue */ + --md-code-hl-special-color: #a31515; /* Darker red */ + --md-code-hl-function-color: #6f42c1; /* Darker purple */ + --md-code-hl-constant-color: #0366d6; /* Darker blue */ + --md-code-hl-keyword-color: #d73a49; /* Darker red */ + --md-code-hl-string-color: #28a745; /* Darker green */ + --md-code-hl-name-color: #005cc5; /* Darker blue */ + --md-code-hl-operator-color: #333; /* Dark grey */ + --md-code-hl-punctuation-color: #333; /* Dark grey */ + --md-code-hl-comment-color: #6a737d; /* Darker grey */ + --md-code-hl-generic-color: #005cc5; /* Darker blue */ + --md-code-hl-variable-color: 
#005cc5; /* Darker blue */ +} + +.md-typeset iframe.giscus-frame { + color-scheme: light dark !important; +} \ No newline at end of file

compositing / Concatenating clips