import * as scrawl from '../source/scrawl.js';
import { reportSpeed } from './utilities.js';

const canvas = scrawl.findCanvas('mycanvas');

// Namespacing boilerplate
const namespace = canvas.name;
const name = (n) => `${namespace}-${n}`;

// Build out the filters
scrawl.makeFilter({
name: name('grayscale'),
method: 'grayscale',
}).clone({
name: name('sepia'),
method: 'sepia',
}).clone({
name: name('invert'),
method: 'invert',
}).clone({
name: name('red'),
method: 'red',
});
scrawl.makeFilter({
name: name('pixelate'),
method: 'pixelate',
tileWidth: 20,
tileHeight: 20,
offsetX: 8,
offsetY: 8,
});
scrawl.makeFilter({
name: name('background-blur'),
method: 'gaussianBlur',
radius: 20,
});
scrawl.makeFilter({
name: name('body-blur'),
method: 'gaussianBlur',
radius: 10,
});

// We'll handle everything in a raw asset object, which a Picture entity can then use as its source
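// The RawAsset wrapper exposes its own hidden <canvas> (surfaced as `element` and `engine` in
// updateSource below); whatever we draw onto that canvas becomes the Picture entity's source image.
// The userAttributes array defines the custom attributes we can update through `myAsset.set()`.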
const myAsset = scrawl.makeRawAsset({
name: name('tensorflow-model-interpreter'),
userAttributes: [{
// We're only interested in the per-pixel segmentation data generated by the TensorFlow model in this demo
key: 'data',
defaultValue: [],
setter: function (item) {
if (item && item.width && item.height && item.data) {
/** @ts-expect-error */
this.canvasWidth = item.width;
/** @ts-expect-error */
this.canvasHeight = item.height;
/** @ts-expect-error */
this.data = item.data;
/** @ts-expect-error */
this.dirtyData = true;
}
},
},{
key: 'canvasWidth',
defaultValue: 0,
setter: () => {},
},{
key: 'canvasHeight',
defaultValue: 0,
setter: () => {},
}],
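// (The empty setters above keep canvasWidth and canvasHeight read-only from the outside; only the
// data setter updates them. Assumption: the RawAsset re-runs updateSource below only when the
// `dirtyData` flag has been raised, so the canvas is redrawn once per new batch of model output
// rather than on every Display cycle.)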
updateSource: function (assetWrapper) {
const { element, engine, canvasWidth, canvasHeight, data } = assetWrapper;
if (canvasWidth && canvasHeight && data) {
const segLength = canvasWidth * canvasHeight,
imageDataLen = segLength * 4,
imageArray = new Uint8ClampedArray(imageDataLen);
for (let i = 0, o = 0; i < segLength; i++) {
o = (i * 4) + 3;
if (data[i]) imageArray[o] = 255;
}
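
// Only the alpha channel gets written: pixels the model flags as 'person' become opaque (black,
// since their RGB bytes stay at 0) while everything else remains fully transparent. This mask is
// what the 'source-in' composite operation relies on further down.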
const iData = new ImageData(imageArray, canvasWidth, canvasHeight);

// Clear the canvas, resizing it if required
element.width = canvasWidth;
element.height = canvasHeight;
engine.putImageData(iData, 0, 0);
}
},
});

// The forever loop function, which captures the TensorFlow model's output and passes it on to our raw asset for processing
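// Note that perform() only re-invokes itself inside the promise's .then handler, so each new
// segmentation starts only after the previous one has resolved - the loop is throttled by the
// model's own speed rather than by the Display cycle.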
const perform = function (net) {
net.segmentPerson(video.source)
.then(data => {
myAsset.set({data});
perform(net);
})
.catch(e => console.log(e));
};

let video, myBackground, myOutline;

// Capture the media stream
scrawl.importMediaStream({
name: name('device-camera'),
video: {
width: { ideal: 600 },
height: { ideal: 400 },
facingMode: 'user',
},
})
.then(mycamera => {
video = mycamera;

// This fixes an issue in Firefox where the media stream will crash TensorFlow if the stream's video element's dimensions have not been set
/** @ts-expect-error */
video.source.width = "600";
/** @ts-expect-error */
video.source.height = "400";Take the media stream and display it in our canvas element
myBackground = scrawl.makePicture({
name: name('background'),
asset: mycamera.name,
order: 2,
width: '100%',
height: '100%',
copyWidth: '80%',
copyHeight: '80%',
copyStartX: '10%',
copyStartY: '10%',
globalCompositeOperation: 'destination-over',
});
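
// How the layers composite together (assuming lower `order` values stamp onto the canvas first):
// - the 'outline' Picture (order 0, created once the model loads) stamps the blurred person mask;
// - the 'body' clone below (order 1, 'source-in') keeps camera pixels only where that mask is opaque;
// - the 'background' Picture above (order 2, 'destination-over') then paints the full camera feed
//   behind everything already on the canvas. All three share the same copy settings so they stay aligned.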
myBackground.clone({
name: name('body'),
order: 1,
globalCompositeOperation: 'source-in',
});

// Start the TensorFlow model
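// Note: `bodyPix` is not imported in this module - it is assumed to be the global supplied by the
// @tensorflow-models/body-pix script loaded elsewhere in the demo page, hence the linter and
// TypeScript suppressions below.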
/* eslint-disable */
/** @ts-expect-error */
bodyPix.load()
/* eslint-enable */
.then(net => {

// Display the visual generated by our raw asset
myOutline = scrawl.makePicture({
name: name('outline'),
asset: name('tensorflow-model-interpreter'),
order: 0,
width: '100%',
height: '100%',
copyWidth: '80%',
copyHeight: '80%',
copyStartX: '10%',
copyStartY: '10%',

// We blur here to make the outline merge into the background
filters: [name('body-blur')],
});

// Invoke the forever loop
perform(net);
})
.catch(e => console.log('ERROR: ', e));
})
.catch(err => console.log(err.message));

// Function to display frames-per-second data, and other information relevant to the demo
const report = reportSpeed('#reportmessage');

// Create the Display cycle animation
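// makeRender drives the canvas's clear/compile/show Display cycle on each animation frame; the
// afterShow hook fires after every frame has been displayed, keeping the speed report in step
// with actual rendering.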
scrawl.makeRender({
name: name('animation'),
target: canvas,
afterShow: report,
});

scrawl.initializeDomInputs([
['select', 'backgroundFilter', 0],
['select', 'outlineFilter', 1],
]);

// Event listeners
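// (Assumption: the #backgroundFilter <select> uses option values matching the filter names defined
// at the top of this file - 'grayscale', 'sepia', 'invert', 'red', 'pixelate', 'background-blur' -
// with an empty value meaning 'no filter'. The #outlineFilter <select> simply toggles the body-blur
// filter on ('1') or off.)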
scrawl.addNativeListener(['input', 'change'], (e) => {
e.preventDefault();
e.returnValue = false;
if (e && e.target) {
const id = e.target.id,
val = e.target.value;
if ('backgroundFilter' === id) {
myBackground.clearFilters();
if (val) myBackground.addFilters(name(val));
}
else {
if ('1' === val) myOutline.addFilters(name('body-blur'));
else myOutline.clearFilters();
}
}
}, '.controlItem');

// Set DOM form initial input values
/** @ts-expect-error */
document.querySelector('#backgroundFilter').value = '';
/** @ts-expect-error */
document.querySelector('#outlineFilter').value = '1';

console.log(scrawl.library);