- Build a sphere tracer with WebGPU (paper, paper2, youtube)
- Create a model with SDF functions from here (see the WGSL sketch below)
- Add light and shadows
- ???
- PROFIT
This code was tested in Chrome and Firefox, and should work on PC too. Star and subscribe!
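A hedged sketch of the core sphere-tracing loop in WGSL (function names and constants are illustrative, not taken from any of the linked sources): starting from ray origin ro with direction rd, step forward by the SDF's value until it drops below a hit threshold.

    fn sdSphere(p: vec3f, r: f32) -> f32 {
        return length(p) - r;
    }

    // March along the ray, stepping by the distance to the nearest surface.
    fn trace(ro: vec3f, rd: vec3f) -> f32 {
        var t: f32 = 0.0;
        for (var i: i32 = 0; i < 128; i++) {
            let d = sdSphere(ro + rd * t, 1.0);
            if (d < 0.001) {
                return t;   // hit: within the surface threshold
            }
            t += d;
            if (t > 100.0) {
                break;      // marched past the scene bounds
            }
        }
        return -1.0;        // miss
    }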
# MIDI to CV converter for Raspberry Pi Pico and MCP4725 DAC by @AxWax
#
# Demo: https://www.youtube.com/watch?v=aGfQHL1jU4I
#
# This is heavily based on and requires
# the SimpleMIDIDecoder library by @diyelectromusic, which can be found at
# https://diyelectromusic.wordpress.com/2021/06/13/raspberry-pi-pico-midi-channel-router/
#
# Wiring:
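The wiring details are truncated above. As a hedged MicroPython sketch of the core idea only (the pin choices, the 0x60 I2C address, the 3.3 V scaling, and the base note are my assumptions, not taken from the original):

    # Sketch: convert a MIDI note number to a control voltage on an MCP4725.
    # Assumes a 12-bit DAC (0-4095) powered at 3.3 V and a 1 V/octave scale.
    from machine import I2C, Pin

    i2c = I2C(0, sda=Pin(0), scl=Pin(1))   # typical Pico I2C0 pins (assumed)
    MCP4725_ADDR = 0x60                    # common MCP4725 address (assumed)

    def note_to_dac(note, base_note=36):
        volts = (note - base_note) / 12.0  # 1 V per octave
        value = int(volts / 3.3 * 4095)    # scale to the 12-bit range at 3.3 V
        return max(0, min(4095, value))

    def write_dac(value):
        # MCP4725 "fast mode" write: the 12-bit value split across two bytes
        i2c.writeto(MCP4725_ADDR, bytes([(value >> 8) & 0x0F, value & 0xFF]))

    write_dac(note_to_dac(60))  # middle C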
function video_to_gif {
    local input_video_path="$1"
    local output_gif_path="$2"
    local fps="${3:-10}"      # default 10 frames per second
    local scale="${4:-1080}"  # default width 1080 px; height follows the aspect ratio
    local loop="${5:-0}"      # 0 = loop forever
    ffmpeg -i "${input_video_path}" \
        -vf "fps=${fps},scale=${scale}:-2:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse" \
        -loop "${loop}" "${output_gif_path}"
}
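For example, with hypothetical file names:

    video_to_gif demo.mp4 demo.gif 15 720

The split/palettegen/paletteuse chain builds a custom 256-color palette from the video before quantizing, which gives noticeably better GIFs than ffmpeg's default fixed palette.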
package org.ygl.openrndr.demos

import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.BufferMultisample
import org.openrndr.draw.DrawPrimitive
import org.openrndr.draw.colorBuffer
import org.openrndr.draw.renderTarget
import org.openrndr.draw.shadeStyle
import org.openrndr.draw.vertexBuffer

import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.color.mix
import org.openrndr.color.rgb
import org.openrndr.draw.isolatedWithTarget
import org.openrndr.draw.renderTarget
import org.openrndr.extra.compositor.compose
import org.openrndr.extra.compositor.draw
import org.openrndr.extra.compositor.post
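The second import block suggests an orx-compositor demo. A minimal sketch of how such a composite is typically set up, given the imports above (the drawing content and window size are illustrative, not the demo's actual code):

    fun main() = application {
        configure {
            width = 640
            height = 480
        }
        program {
            // Build a layered composite; each `draw` block renders one layer.
            val composite = compose {
                draw {
                    drawer.clear(ColorRGBa.BLACK)
                    drawer.fill = mix(rgb(1.0, 0.2, 0.2), rgb(0.2, 0.2, 1.0), 0.5)
                    drawer.circle(width / 2.0, height / 2.0, 100.0)
                }
            }
            extend {
                composite.draw(drawer)
            }
        }
    }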
| #include "FFT.h" | |
| void fft(int *x_in, | |
| std::complex<double> *x_out, | |
| int N) { | |
| // Make copy of array and apply window | |
| for (int i = 0; i < N; i++) { | |
| x_out[i] = std::complex<double>(x_in[i], 0); | |
| x_out[i] *= 1; // Window |
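The file is truncated after the windowing loop. For context, a self-contained sketch of the radix-2 Cooley-Tukey FFT that such a function typically dispatches to next (assuming N is a power of two; this is not the original implementation):

    #include <complex>
    #include <vector>
    #include <cmath>

    // Recursive radix-2 Cooley-Tukey FFT, in place on x[0..N).
    void fft_rec(std::complex<double> *x, int N) {
        if (N <= 1) return;
        // Split into even- and odd-indexed halves
        std::vector<std::complex<double>> even(N / 2), odd(N / 2);
        for (int i = 0; i < N / 2; i++) {
            even[i] = x[2 * i];
            odd[i] = x[2 * i + 1];
        }
        fft_rec(even.data(), N / 2);
        fft_rec(odd.data(), N / 2);
        // Combine with twiddle factors e^(-2*pi*i*k/N)
        for (int k = 0; k < N / 2; k++) {
            std::complex<double> t = std::polar(1.0, -2.0 * M_PI * k / N) * odd[k];
            x[k] = even[k] + t;
            x[k + N / 2] = even[k] - t;
        }
    }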
A particularly clear example of gamma-correct (left) and incorrect (right) rendering. Source.
Best practices
Textures containing color data (.map, .emissiveMap, etc.) should be configured with .encoding = sRGBEncoding; all other textures use LinearEncoding. A short sketch follows the snippet below.

import Service from '@ember/service';
const has = () => true;
const { console, WeakMap, Proxy, Symbol } = window;
const get = (target, key) => key === Symbol.unscopables ? undefined : target[key];

export default Service.extend({
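Returning to the color-management note above, a minimal sketch in pre-r152 three.js terms, where the .encoding property and these constants exist (the file names are placeholders):

    import * as THREE from 'three';

    const renderer = new THREE.WebGLRenderer();
    renderer.outputEncoding = THREE.sRGBEncoding;   // gamma-correct output

    const loader = new THREE.TextureLoader();

    const colorMap = loader.load('diffuse.jpg');
    colorMap.encoding = THREE.sRGBEncoding;         // color data

    const normalMap = loader.load('normal.jpg');
    normalMap.encoding = THREE.LinearEncoding;      // non-color data (the default)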
// Modified version of a tilt shift shader from Martin Jonasson (http://grapefrukt.com/)
// Read http://notes.underscorediscovery.com/ for context on shaders and this file
// License : MIT

uniform sampler2D tex0;
varying vec2 tcoord;
varying vec4 color;

/*
    Take note that blurring in a single pass (the two for loops below) is more
    expensive than separating the blur into a horizontal pass and a vertical pass.
*/
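As a hedged illustration of the separable alternative (the uniform names are illustrative): the same shader runs twice, once per axis, sampling far fewer texels overall than a single two-dimensional loop.

    uniform sampler2D tex0;
    uniform vec2 direction;    // (1.0, 0.0) for the horizontal pass, (0.0, 1.0) for the vertical
    uniform vec2 resolution;   // render target size in pixels
    varying vec2 tcoord;

    void main() {
        vec4 sum = vec4(0.0);
        // 9-tap box blur along one axis; a second pass handles the other axis
        for (int i = -4; i <= 4; i++) {
            sum += texture2D(tex0, tcoord + direction * float(i) / resolution);
        }
        gl_FragColor = sum / 9.0;
    }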
Minimal example: transcode from MP3 to WMA:
ffmpeg -i input.mp3 output.wma
You can get the list of supported formats with:
ffmpeg -formats
Convert WAV to MP3, mix down to mono (use 1 audio channel), set the bit rate to 64 kbps and the sample rate to 22050 Hz:

ffmpeg -i input.wav -ac 1 -b:a 64k -ar 22050 output.mp3