/* global JSZip, Tone, html, saveAs */

// Keys the poster responds to; 'display' is a pseudo-key for the preview area.
const KEYS = [
  'arrowup', 'arrowdown', 'arrowleft', 'arrowright',
  '1', '2', '3', '4',
  'w', 'a', 's', 'd', 'f', 'g', ' ',
  'display'
];

// Returns true when every value in the dictionary is falsy.
function isAllFalse(dict) {
  return Object.keys(dict).reduce((acc, key) => {
    if (dict[key]) acc = false;
    return acc;
  }, true);
}

// Reset all uploads, crops, amplitude traces and pressed-key flags on the state.
function clearAll(state) {
  // makeValue is a factory so every key gets a fresh value (important for arrays).
  const dictOf = (makeValue) =>
    KEYS.reduce((acc, key) => {
      acc[key] = makeValue();
      return acc;
    }, {});
  state.images = dictOf(() => null);
  state.sounds = dictOf(() => null);
  state.soundCrops = dictOf(() => 0.0);
  state.soundAmplitudes = dictOf(() => []);
  state.keyPressed = dictOf(() => false);
}

// Stop every sound that is currently playing.
function stopAll(state) {
  KEYS.forEach((key) => {
    const sound = state.sounds[key];
    if (sound && sound.state === 'started') {
      sound.stop();
    }
  });
}

// adapted from https://gist.github.com/also/900023
// Builds a 44-byte canonical WAV header.
// options: { numFrames, numChannels = 2, sampleRate = 44100, isFloat }
// Returns a Uint8Array of the header bytes.
function getWavHeader(options) {
  const numFrames = options.numFrames;
  const numChannels = options.numChannels || 2;
  const sampleRate = options.sampleRate || 44100;
  const bytesPerSample = options.isFloat ? 4 : 2;
  const format = options.isFloat ? 3 : 1; // 3 = IEEE float, 1 = PCM

  const blockAlign = numChannels * bytesPerSample;
  const byteRate = sampleRate * blockAlign;
  const dataSize = numFrames * blockAlign;

  const buffer = new ArrayBuffer(44);
  const dv = new DataView(buffer);
  let p = 0;

  function writeString(s) {
    for (let i = 0; i < s.length; i++) {
      dv.setUint8(p + i, s.charCodeAt(i));
    }
    p += s.length;
  }
  function writeUint32(d) {
    dv.setUint32(p, d, true);
    p += 4;
  }
  function writeUint16(d) {
    dv.setUint16(p, d, true);
    p += 2;
  }

  writeString('RIFF');              // ChunkID
  writeUint32(dataSize + 36);       // ChunkSize
  writeString('WAVE');              // Format
  writeString('fmt ');              // Subchunk1ID
  writeUint32(16);                  // Subchunk1Size
  writeUint16(format);              // AudioFormat
  writeUint16(numChannels);         // NumChannels
  writeUint32(sampleRate);          // SampleRate
  writeUint32(byteRate);            // ByteRate
  writeUint16(blockAlign);          // BlockAlign
  writeUint16(bytesPerSample * 8);  // BitsPerSample
  writeString('data');              // Subchunk2ID
  writeUint32(dataSize);            // Subchunk2Size

  return new Uint8Array(buffer);
}

// https://stackoverflow.com/questions/62172398/convert-audiobuffer-to-arraybuffer-blob-for-wav-download
// Prepends a WAV header to raw sample data (an ArrayBuffer of PCM/float samples).
// Returns a Uint8Array of complete WAV bytes.
function getWavBytes(buffer, options) {
  const type = options.isFloat ? Float32Array : Uint16Array;
  const numFrames = buffer.byteLength / type.BYTES_PER_ELEMENT;

  const headerBytes = getWavHeader(Object.assign({}, options, { numFrames }));
  const wavBytes = new Uint8Array(headerBytes.length + buffer.byteLength);

  // prepend header, then add the sample bytes
  wavBytes.set(headerBytes, 0);
  wavBytes.set(new Uint8Array(buffer), headerBytes.length);

  return wavBytes;
}

// https://www.codegrepper.com/code-examples/javascript/how+to+convert+data+uri+in+array+buffer
// Decodes a base64 data-URI into an ArrayBuffer.
// Doesn't handle URLEncoded DataURIs - see SO answer #6850276 for code that does.
function dataURItoArrayBuffer(dataURI) {
  // convert base64 payload to raw binary data held in a string
  const byteString = atob(dataURI.split(',')[1]);

  // copy each character's code point into a byte buffer
  const ab = new ArrayBuffer(byteString.length);
  const ia = new Uint8Array(ab);
  for (let i = 0; i < byteString.length; i++) {
    ia[i] = byteString.charCodeAt(i);
  }
  return ab;
}

// Extract files from a zip and load them onto the state.
// Zip entries are named `${key}_sound.wav` / `${key}_image[_name].ext`.
// Returns a promise that resolves when every entry has been imported.
async function importZip(state, zipFile) {
  console.log("zipFile", zipFile);

  // Read a Blob/File into an ArrayBuffer.
  function readFileIntoArrayBuffer(file) {
    return new Promise((resolve, reject) => {
      const reader = new FileReader();
      reader.onload = (e) => resolve(e.target.result);
      // fix: the original promise could never reject on a read error
      reader.onerror = () => reject(reader.error);
      reader.readAsArrayBuffer(file);
    });
  }

  const zipBuffer = await readFileIntoArrayBuffer(zipFile);
  const zip = await JSZip.loadAsync(zipBuffer, { type: 'arrayBuffer' });
  console.log('zip', zip);

  const filenames = Object.keys(zip.files);
  const buffers = await Promise.all(
    filenames.map((filename) => zip.file(filename).async('arraybuffer'))
  );
  const files = buffers.map((buffer, i) => new File([buffer], filenames[i]));

  const importPromises = files.map((file) => {
    let filename = file.name;
    // Windows removes leading whitespace from filenames when it creates zips,
    // so the space-bar key's files come back starting with '_' — restore it.
    if (filename[0] === '_') {
      filename = ' ' + filename;
    }
    let [key, type] = filename.split('_');
    type = type.split('.')[0];

    if (type === 'sound') {
      return new Promise((resolve, reject) => {
        const urlObj = URL.createObjectURL(file);
        Tone.start().then(() => {
          const player = new Tone.Player({
            url: urlObj,
            onload: () => {
              state.soundAmplitudes[key] = [];
              state.sounds[key] = player;
              URL.revokeObjectURL(urlObj);
              resolve();
            },
            onerror: (e) => {
              console.log('error', e);
              state.soundAmplitudes[key] = [];
              state.sounds[key] = null;
              URL.revokeObjectURL(urlObj); // fix: original leaked the object URL here
              reject(e);
            }
          }).toDestination();
        });
      });
    }

    if (type === 'image') {
      return new Promise((resolve) => {
        const reader = new FileReader();
        state.imageFiles[key] = file;
        reader.onload = (e) => {
          state.images[key] = e.target.result;
          resolve();
        };
        reader.readAsDataURL(file);
      });
    }

    // Unknown entry (e.g. directory metadata) — nothing to import.
    return Promise.resolve();
  });

  return Promise.all(importPromises);
}

// Fetch a bundled example zip and import it, toggling the waiting overlay.
function loadExample(state, emitter, id) {
  fetch(`examples/${id}.zip`)
    .then((response) => {
      console.log(response);
      return response.blob();
    })
    .then((blob) => {
      console.log(blob);
      state.waiting = true;
      emitter.emit('render');
      return importZip(state, blob).then(() => {
        state.waiting = false;
        emitter.emit('render');
      });
    })
    // fix: the original chain had no rejection handler at all
    .catch((err) => {
      console.log('error', err);
      state.waiting = false;
      emitter.emit('render');
    });
}

const examples = ['tutorial', 'valentine'];

// Choo store: owns all app state and wires up every UI/keyboard event.
function store(state, emitter) {
  // If the url hash names a known example, show a click-through overlay first
  // (a user gesture is required before audio can start), then load it.
  if (window.location.hash) {
    const id = window.location.hash.slice(1);
    if (examples.includes(id)) {
      state.clickHere = true;
      const onClick = () => {
        state.clickHere = false;
        state.waiting = true;
        emitter.emit('render');
        loadExample(state, emitter, id);
        window.removeEventListener('click', onClick);
      };
      // fix: dropped the useless `const clickListener =` (addEventListener returns undefined)
      window.addEventListener('click', onClick);
    }
  }

  // Dictionary of images: { key: dataURL }
  state.images = {};
  // Dictionary of image files: { key: File }
  state.imageFiles = {};
  // Dictionary of sounds: { key: Tone.Player }
  state.sounds = {};
  // Dictionary of pressed keys: { key: bool }
  state.keyPressed = {};
  // Dictionary of sound crops (fraction of the buffer duration): { key: number }
  state.soundCrops = {};
  // Dictionary of sound amplitudes: { key: number[] }
  state.soundAmplitudes = {};
  // Initialize dictionaries
  clearAll(state);

  // Image url data to display
  state.displayData = '';
  // Currently selected key
  state.selectedKey = null;
  // Camera stream
  state.cameraStream = null;
  // Audio recording
  state.recorder = null;
  state.mic = null;
  state.meter = null;
  state.recordingInterval = 0;
  state.fullscreen = false;
  // Loading/Waiting screen
  state.waiting = false;

  emitter.on('key-up', (key) => {
    if (state.waiting) return false;
    if (KEYS.indexOf(key) === -1) return;

    state.keyPressed[key] = false;
    if (state.sounds[key]) {
      state.sounds[key].stop();
    }
    if (state.images[key]) {
      // If nothing is pressed anymore, or the lifted key is the selected one,
      // the display should be cleared.
      if (isAllFalse(state.keyPressed) || key === state.selectedKey) {
        state.displayData = '';
      } else {
        // Otherwise clear the display only when every key still being
        // pressed is audio-only.
        const pressedKeys = KEYS.filter((k) => state.keyPressed[k]);
        const hasImage = pressedKeys.find((k) => state.images[k]);
        if (!hasImage) {
          state.displayData = '';
        }
      }
    }
    emitter.emit('render');
  });

  emitter.on('key-down', (key) => {
    if (state.waiting) return false;
    if (KEYS.indexOf(key) === -1) return;

    emitter.emit('select-key', key);
    if (!state.keyPressed[key]) {
      if (state.images[key]) {
        state.displayData = state.images[key];
      }
      if (state.sounds[key]) {
        const sound = state.sounds[key];
        sound.stop();
        // fix: use THIS key's crop — the original read
        // state.soundCrops[state.selectedKey], relying on 'select-key'
        // having already updated selectedKey
        const startAt = (state.soundCrops[key] || 0) * sound.buffer.duration;
        sound.start(0, startAt);
      }
    }
    state.keyPressed[key] = true;
    emitter.emit('render');
  });

  // Debug hook: expose the store state on the window.
  window.state = state;

  emitter.on('select-display', () => {
    console.log('select-display');
    if (state.selectedKey === 'display') {
      state.selectedKey = null;
    } else {
      state.selectedKey = 'display';
    }
    emitter.emit('render');
  });

  emitter.on('deselect-display', () => {
    console.log("deslect-display");
    if (state.selectedKey === 'display') {
      state.selectedKey = null;
    }
    emitter.emit('render');
  });

  emitter.on('select-key', (key) => {
    state.isDisplaySelected = false;
    state.selectedKey = key;
    emitter.emit('render');
  });

  emitter.on('clear', () => {
    state.selectedKey = null;
    stopAll(state);
    clearAll(state);
    emitter.emit('render');
  });

  // Generic upload: accepts either an image or a sound for the selected key.
  emitter.on('upload', () => {
    const imageFormats = ['.png', '.jpg', '.jpeg', '.gif', '.webp'];
    // fix: soundFormats was referenced below but never defined in this scope,
    // so uploading a sound file threw a ReferenceError
    const soundFormats = ['.mp3', '.wav', '.ogg', '.webm', '.m4a'];
    // NOTE(review): the html`` template content was missing from the source;
    // reconstructed as a bare file input — confirm accept/extra attributes.
    const input = html`<input type="file" />`;
    input.addEventListener('change', (e) => {
      if (!e.target.files || !e.target.files[0]) return false;
      const file = e.target.files[0];
      if (!file) return;

      const ext = `.${file.name.split('.').pop().toLowerCase()}`;
      if (imageFormats.indexOf(ext) !== -1) {
        const reader = new FileReader();
        state.imageFiles[state.selectedKey] = file;
        reader.onload = (ev) => {
          state.images[state.selectedKey] = ev.target.result;
          emitter.emit('render');
        };
        reader.readAsDataURL(file);
      }
      if (soundFormats.indexOf(ext) !== -1) {
        // Generating audio object
        const urlObj = URL.createObjectURL(file);
        Tone.start().then(() => {
          const player = new Tone.Player({
            url: urlObj,
            onload: () => {
              state.soundAmplitudes[state.selectedKey] = [];
              state.sounds[state.selectedKey] = player;
              emitter.emit('render');
              URL.revokeObjectURL(urlObj);
            },
            onerror: (err) => {
              console.log('error', err);
              state.soundAmplitudes[state.selectedKey] = [];
              state.sounds[state.selectedKey] = null;
              URL.revokeObjectURL(urlObj); // fix: original leaked the object URL here
              emitter.emit('render');
            }
          }).toDestination();
        });
      }
    });
    input.click();
  });

  emitter.on('upload-image', () => {
    const imageFormats = ['.png', '.jpg', '.jpeg', '.gif', '.webp'];
    // NOTE(review): template content missing from source; reconstructed.
    const input = html`<input type="file" />`;
    input.addEventListener('change', (e) => {
      if (!e.target.files || !e.target.files[0]) return false;
      const file = e.target.files[0];
      if (!file) return;

      const ext = `.${file.name.split('.').pop().toLowerCase()}`;
      if (imageFormats.indexOf(ext) !== -1) {
        const reader = new FileReader();
        state.imageFiles[state.selectedKey] = file;
        reader.onload = (ev) => {
          state.images[state.selectedKey] = ev.target.result;
          state.waiting = false;
          emitter.emit('render');
        };
        reader.readAsDataURL(file);
        state.waiting = true;
        emitter.emit('render');
      }
    });
    input.click();
  });

  emitter.on('upload-sound', () => {
    const soundFormats = ['.mp3', '.wav', '.ogg', '.webm', '.m4a'];
    // NOTE(review): template content missing from source; reconstructed.
    const input = html`<input type="file" />`;
    input.addEventListener('change', (e) => {
      if (!e.target.files || !e.target.files[0]) return false;
      const file = e.target.files[0];
      if (!file) return;

      const ext = `.${file.name.split('.').pop().toLowerCase()}`;
      if (soundFormats.indexOf(ext) !== -1) {
        // Generating audio object
        const urlObj = URL.createObjectURL(file);
        Tone.start().then(() => {
          const player = new Tone.Player({
            url: urlObj,
            onload: () => {
              state.soundAmplitudes[state.selectedKey] = [];
              state.sounds[state.selectedKey] = player;
              state.waiting = false;
              emitter.emit('render');
              URL.revokeObjectURL(urlObj);
            },
            onerror: (err) => {
              console.log('error', err);
              state.soundAmplitudes[state.selectedKey] = [];
              state.sounds[state.selectedKey] = null;
              URL.revokeObjectURL(urlObj); // fix: original leaked the object URL here
              state.waiting = false;       // fix: original left the waiting overlay up on error
              emitter.emit('render');
            }
          }).toDestination();
        });
        state.waiting = true;
        emitter.emit('render');
      }
    });
    input.click();
  });

  emitter.on('remove-image', () => {
    state.images[state.selectedKey] = null;
    state.cameraStream = null;
    emitter.emit('render');
  });

  emitter.on('remove-sound', () => {
    emitter.emit('stop-sound');
    state.sounds[state.selectedKey] = null;
    emitter.emit('render');
  });

  // Grab a still frame from the camera stream into the selected key's image.
  emitter.on('capture-image', () => {
    // NOTE(review): template contents missing from source; reconstructed —
    // the video must be playing for 'playing' to fire, hence autoplay+muted.
    const video = html`<video autoplay muted playsinline></video>`;
    video.srcObject = state.cameraStream;
    video.addEventListener('playing', () => {
      const canvas = html`<canvas></canvas>`;
      canvas.width = video.videoWidth;
      canvas.height = video.videoHeight;
      canvas.getContext('2d').drawImage(video, 0, 0);
      state.images[state.selectedKey] = canvas.toDataURL();
      emitter.emit('render');
    });
  });

  emitter.on('download-image', () => {
    if (state.cameraStream) {
      emitter.emit('capture-image');
    }
    const link = document.createElement("a");
    link.href = state.images[state.selectedKey];
    link.download = `${state.selectedKey}_image.png`;
    link.click();
  });

  emitter.on('start-camera-stream', () => {
    navigator.mediaDevices.getUserMedia({ video: true })
      .then((stream) => {
        state.cameraStream = stream;
        emitter.emit('render');
      })
      .catch((err) => {
        console.log("error", err);
        state.cameraStream = null;
        emitter.emit('render');
      });
  });

  emitter.on('play-sound', () => {
    const sound = state.sounds[state.selectedKey];
    if (sound) {
      sound.stop();
      const startAt = (state.soundCrops[state.selectedKey] || 0) * sound.buffer.duration;
      sound.start(0, startAt);
    }
    emitter.emit('render');
  });

  emitter.on('stop-sound', () => {
    const sound = state.sounds[state.selectedKey];
    if (sound) {
      sound.stop();
    }
    emitter.emit('render');
  });

  // First invocation builds the Tone recording graph; subsequent invocations
  // actually record (the mic meter is sampled every 100ms for the waveform UI).
  emitter.on('start-recording', () => {
    emitter.emit('stop-sound');
    navigator.mediaDevices.getUserMedia({ audio: true })
      .then(() => {
        if (state.recorder) {
          state.soundCrops[state.selectedKey] = 0;
          state.soundAmplitudes[state.selectedKey] = [0];
          state.mic.open()
            .then(() => {
              state.recorder.start();
              // Recording ends on the next mouseup anywhere on the page.
              window.addEventListener(
                'mouseup',
                () => emitter.emit('stop-recording'),
                { once: true }
              );
              clearInterval(state.recordingInterval);
              state.recordingInterval = setInterval(() => {
                state.soundAmplitudes[state.selectedKey].push(
                  100 + state.meter.getValue()
                );
                emitter.emit('render');
              }, 100);
              emitter.emit('render');
            })
            .catch((e) => {
              console.log("mic not open", e);
            });
        } else {
          Tone.start().then(() => {
            state.recorder = new Tone.Recorder();
            state.meter = new Tone.Meter();
            state.mic = new Tone.UserMedia();
            state.mic.connect(state.meter);
            state.mic.connect(state.recorder);
            emitter.emit('render');
          });
        }
      })
      .catch((err) => {
        console.log("mic not open", err);
      });
  });

  emitter.on('stop-recording', () => {
    if (state.recorder && state.recorder.state === 'started') {
      clearInterval(state.recordingInterval);
      state.recorder.stop()
        .then((blob) => blob.arrayBuffer())
        .then((buffer) => {
          const ctx = Tone.getContext();
          return ctx.decodeAudioData(buffer);
        })
        .then((audioBuffer) => {
          const recordedData = new Tone.ToneAudioBuffer(audioBuffer);
          const sound = new Tone.Player(recordedData).toDestination();
          state.sounds[state.selectedKey] = sound;
          emitter.emit('render');
        })
        .catch((err) => {
          console.log('error', err);
        });
    }
  });

  emitter.on('download-sound', () => {
    const sound = state.sounds[state.selectedKey];
    if (!sound) return;

    const crop = state.soundCrops[state.selectedKey] || 0;
    const slicedBuffer = sound.buffer.slice(crop * sound.buffer.duration);
    const float32 = slicedBuffer.getChannelData(0);
    const buff = getWavBytes(float32.buffer, {
      isFloat: true,
      numChannels: 1,
      sampleRate: Tone.getContext().sampleRate
    });
    const blob = new Blob([buff], { type: 'audio/x-wav' });
    saveAs(blob, `${state.selectedKey}_sound.wav`);
  });

  emitter.on('toggle-fullscreen', () => {
    if (state.fullscreen) {
      state.fullscreen = false;
      if (document.fullscreen) {
        document.exitFullscreen();
      }
    } else {
      state.fullscreen = true;
      document.documentElement.requestFullscreen();
    }
    emitter.emit('render');
  });

  // Store the crop point as a fraction (0..1) of the clicked waveform's width.
  emitter.on('handle-crop-click', (e) => {
    const bounds = e.target.getBoundingClientRect();
    state.soundCrops[state.selectedKey] = (e.clientX - bounds.left) / bounds.width;
    emitter.emit('render');
  });

  emitter.on('export-zip', () => {
    if (isAllFalse(state.sounds) && isAllFalse(state.images)) return;
    const pack = new JSZip();

    // PACK SOUNDS
    KEYS.forEach((key) => {
      const sound = state.sounds[key];
      if (!sound) return;
      // fix: use this key's crop — the original read
      // state.soundCrops[state.selectedKey], applying the selected key's
      // crop to every exported sound
      const crop = (state.soundCrops[key] || 0) * sound.buffer.duration;
      const slicedBuffer = sound.buffer.slice(crop);
      const float32 = slicedBuffer.getChannelData(0);
      const buff = getWavBytes(float32.buffer, {
        isFloat: true,
        numChannels: 1,
        sampleRate: Tone.getContext().sampleRate
      });
      pack.file(`${key}_sound.wav`, buff);
    });

    // PACK IMAGES
    KEYS.forEach((key) => {
      const dataURI = state.images[key];
      if (!dataURI) return;
      const buffer = dataURItoArrayBuffer(dataURI);
      const imageFile = state.imageFiles[key];
      if (imageFile) {
        // If it's an uploaded image, keep the original filename in the entry
        pack.file(`${key}_image_${imageFile.name}`, buffer);
      } else {
        pack.file(`${key}_image.png`, buffer);
      }
    });

    pack.generateAsync({ type: "blob" })
      .then(function (content) {
        saveAs(content, `${Date.now()}_poster.zip`);
      });
  });

  emitter.on('import-zip', () => {
    stopAll(state);
    // NOTE(review): template content missing from source; reconstructed.
    const input = html`<input type="file" />`;
    input.addEventListener('change', (e) => {
      // Interrupt execution if no file was selected
      if (!e.target.files || !e.target.files[0]) return false;
      const file = e.target.files[0];
      importZip(state, file)
        .then(() => {
          state.waiting = false;
          emitter.emit('render');
        })
        // fix: a failed import previously left the waiting overlay up forever
        .catch((err) => {
          console.log('error', err);
          state.waiting = false;
          emitter.emit('render');
        });
      state.waiting = true;
      emitter.emit('render');
    });
    input.click();
  });

  emitter.on('start-tutorial', () => {
    state.isTutorialOpen = true;
    emitter.emit('render');
  });

  emitter.on('exit-tutorial', () => {
    state.isTutorialOpen = false;
    emitter.emit('render');
  });

  window.addEventListener('keydown', (e) => {
    const key = e.key.toLowerCase();
    if (KEYS.indexOf(key) !== -1) e.preventDefault();
    emitter.emit('key-down', key);
  });

  window.addEventListener('keyup', (e) => {
    const key = e.key.toLowerCase();
    if (KEYS.indexOf(key) !== -1) e.preventDefault();
    emitter.emit('key-up', key);
  });

  window.addEventListener('resize', () => {
    emitter.emit('render');
  });
}