Record and visualize xy-path of Slider2D movement and triggered playback

hey,

I'm using a Slider2D and a MIDI controller (joystick) to control some synth parameters and record the x/y MIDI data into Ableton Live. The data ends up on two MIDI channels, one for x and one for y. I would then adjust the MIDI data in Ableton Live and would like to play it back synchronized to Events in SC. How can I do that?

Because the two separate channels of MIDI data don't really visualize the x/y movement the way the Slider2D does, I thought another approach would be to store and visualize the Slider2D movement in SC itself and play it back in SC via a triggered ramp or with Events. But then you can't easily adjust the recorded MIDI data.

I was imagining something like this for visualisation:

Unbenannt

Any thoughts or best practice with this approach?

Here is the basic Slider2D MIDI setup:

(
// Environment-based Slider2D <-> MIDI setup.
// x.mapMidi([ccX, ccY]) binds CC numbers to successive latent coordinates;
// x.makeGui opens a window whose Slider2D both sends and follows the coords.
x = Environment.make { |self|

	// two-slot server buffer mirroring the current x/y values
	~bufXY = Buffer.alloc(s, 2);

	~makeGui = { |self|
		var win, pointView, graphView;

		win = Window("test", Rect(10, 500, 440, 320)).front;

		self.xySlider = Slider2D()
		.background_(Color.white.alpha_(0))
		.action_{ |view|
			self.setLatent(0, view.x, view.y)
		};

		// Poll the stored coords and mirror them onto the slider.
		// FIX: the defer must happen inside the updateFunc (outer function),
		// otherwise `.defer` runs only once when SkipJack is created.
		// FIX: guard against nil coords before any MIDI/slider action occurred,
		// since setXY(*nil.asArray) would raise an error.
		SkipJack({
			{
				if(self.coords.notNil) {
					self.xySlider.setXY(*self.coords.asArray)
				}
			}.defer
		}, 0.01, { self.xySlider.isClosed });

		graphView = StackLayout(self.xySlider, View().layout_(
			VLayout(pointView).margins_(10)
		)).mode_(\stackAll);

		win.layout = HLayout([graphView, stretch: 2]);

	};

	// Store coordinate values starting at index n and mirror them
	// to the server-side buffer.
	~setLatent = { |self, n ...coords|
		self.coords = self.coords ?? { Order[] };
		coords.do { |c, k| self.coords[n + k] = c };
		self.bufXY.setn(n, coords);
	};

	// Map a list of CC numbers to successive latent dimensions (0..1 range).
	~mapMidi = { |self, ccs|
		self.midiResponders = self.midiResponders ?? { Order[] };
		self.unmapMidi;
		ccs.do { |cc, n|
			self.midiResponders[cc] = MIDIFunc.cc({ |v, c|
				self.setLatent(n, v / 127)
			}, cc).fix
		}

	};

	// Free all registered MIDI responders and forget them,
	// so a repeated call does not try to free stale objects.
	~unmapMidi = { |self|
		self.midiResponders.do(_.free);
		self.midiResponders = Order[];
	};

}.know_(true);

x.mapMidi([0, 1]);
x.makeGui;
)

re synchronizing Ableton and SC, I have experience with these three options:

  1. make a locator for your start point in the timeline, and MIDI map this locator and the play button so that Ableton starts playing from this point when you start your SC sequence

  2. make a MIDI clip containing your automation and MIDI map this clip so it starts playing when you start your SC sequence

  3. continuously send the playhead position (in beats) using Max for Live like this:


    and respond appropriately in SC by playing the correct stuff at the correct time. (a bit more work to engineer but also more flexible in that you can start playing anywhere in the middle of the piece and everything will sync up, also non-linear playback is possible.)

I don’t think any of these methods will result in perfect sync but in my experience all are quite usable.

for visualizing/editing in SC, I wonder if someone has made something like this before, a 2D envelope editor?

it wouldn’t be too hard, I think, you’d need to store a sequence of timestamped x@y values, and then figure out how to play, display, and edit them as you want. (these last two are where I imagine the difficulty being – because it’s really a 3D plot of x and y over time… you’d probably want different modes, where you could edit in x/y space and also in timeline mode)

oh, also have you looked into IanniX? (not sure whether or not it can do what you describe)
https://www.iannix.org/en/whatisiannix/

ok… well I started trying some things out.

in case it’s useful to you, here’s a sketch (recording midi cc 0 and 1 as x/y data, playing it back and visualizing , no editing yet):

(
// Synth whose timbre/position is driven by two 0..1 controls: \x and \y
s.waitForBoot {
  ~synth = {
    // small lag on both controls smooths sudden jumps from MIDI/GUI input
    var xPos = \x.kr(0.5, 0.01);
    var yPos = \y.kr(0.5, 0.01);
    var cutoff = yPos.linexp(0, 1, 50, 5000);
    var filtered = RLPF.ar(WhiteNoise.ar, cutoff);
    // x pans the filtered noise across the stereo field
    Pan2.ar(filtered, xPos.linlin(0, 1, -1, 1));
  }.play;
};
)

(
// basic midi control: cc 0 -> x, cc 1 -> y, values normalized to 0..1
MIDIClient.init;
MIDIIn.connectAll;

MIDIdef.cc(\x, { |val| ~xfunc.(val / 127) }, 0);
MIDIdef.cc(\y, { |val| ~yfunc.(val / 127) }, 1);

// keep the latest values in environment variables so they can be recorded later
~x = 0.5;
~y = 0.5;

// each setter updates the synth AND caches the value for the recorder
~xfunc = { |x| ~synth.set(\x, x); ~x = x; };
~yfunc = { |y| ~synth.set(\y, y); ~y = y; };
)

(
// start recording: poll ~x / ~y every `resolution` seconds and log a
// timestamped event whenever either value changed.
// (could be improved to be event-driven instead of constant polling)
var recStartTime = Main.elapsedTime;
var resolution = 0.01; // time resolution, in seconds
~sequence = [];

~recordingRout = fork {
  var prevX, prevY;
  inf.do {
    var curTime = Main.elapsedTime;
    // FIX: put the right-hand test in a function so `or:` short-circuits
    // (a bare expression is evaluated eagerly in sclang)
    if ((~x != prevX) or: { ~y != prevY }) {
      ~sequence = ~sequence.add((
        time: (curTime - recStartTime).round(resolution),
        x: ~x,
        y: ~y
      ));
      prevX = ~x;
      prevY = ~y;
    };
    resolution.wait;
  };
};
)

// stop recording (safe to call even if the routine has already stopped)
~recordingRout.stop

// number of (time, x, y) events captured so far
~sequence.size

(
// play recording.
// FIX: keep the routine in ~playRout so playback can be
// interrupted with ~playRout.stop (the bare fork was unstoppable).
~playRout = fork {
  var now = 0.0;
  ~sequence.do { |point|
    // wait the delta between consecutive timestamps, then restore x/y
    (point.time - now).wait;
    now = point.time;
    ~xfunc.value(point.x);
    ~yfunc.value(point.y);
  };
}
)

(
// visualize recording as a time graph of x and y (red = x, blue = y)
var win, canvas;
var width = 1000;
var height = 500;
var colors = [Color.red, Color.blue];

// FIX: guard against nil/zero total duration (empty or instantaneous
// recording would make `~sequence.last.time` nil or divide by zero)
var totalTime = {
  if(~sequence.size > 0) { max(~sequence.last.time, 1e-6) } { 1 }
};
var timeToPixels = { |time| (time / totalTime.value) * width };
var pointToPixels = { |point, axis = \x| (timeToPixels.(point.time)) @ (height * point[axis]) };

//Window.closeAll;
win = Window("", Rect(0, 0, width, height)).front;
canvas = UserView(win, Rect(0, 0, width, height)).drawFunc_({
  Pen.use {
    (~sequence.size - 1).do { |i|
      var point = ~sequence[i];
      var nextPoint = ~sequence[i + 1];
      // FIX: renamed inner counter (was `i`, shadowing the outer loop index)
      [\x, \y].do { |axis, colorIndex|
        var pointPixels = pointToPixels.(point, axis);
        var nextPointPixels = pointToPixels.(nextPoint, axis);
        // step interpolation: hold each value until the next timestamp
        var midPointPixels = nextPointPixels.x @ pointPixels.y;
        Pen.color_(colors[colorIndex]);
        Pen.moveTo(pointPixels);
        Pen.lineTo(midPointPixels);
        Pen.lineTo(nextPointPixels);
        Pen.stroke;
      };
    }
  }
});
)

(
// visualize recording as an x/y position trace; hue encodes time
var win, canvas;
var width = 700;
var height = 700;

var pointToPixels = { |point| (width * point.x) @ (height * point.y) };
// FIX: guard against nil/zero total duration (empty or instantaneous
// recording would make `~sequence.last.time` nil or divide by zero)
var totalTime = {
  if(~sequence.size > 0) { max(~sequence.last.time, 1e-6) } { 1 }
};

//Window.closeAll;
win = Window("", Rect(0, 0, width, height)).front;
canvas = UserView(win, Rect(0, 0, width, height)).drawFunc_({
  Pen.use {
    // current position marker follows the live ~x / ~y values
    var curPointPixels = pointToPixels.(~x @ ~y);
    (~sequence.size - 1).do { |i|
      var point = ~sequence[i];
      var nextPoint = ~sequence[i + 1];
      var pointPixels = pointToPixels.(point);
      var nextPointPixels = pointToPixels.(nextPoint);
      var timeNormalized = point.time / totalTime.value;
      Pen.color_(Color.hsv(timeNormalized * 0.8, 1, 0.9));
      Pen.width_(3);
      Pen.moveTo(pointPixels);
      Pen.lineTo(nextPointPixels);
      Pen.stroke;
    };
    // draw circle at current position
    Pen.addOval(Rect(curPointPixels.x - 10, curPointPixels.y - 10, 20, 20));
    Pen.fillColor_(Color.white);
    Pen.strokeColor_(Color.black);
    Pen.fillStroke;
  };
});

// keep a handle so the playback routine can refresh this window
~win = win;
)


(
// play recording with the x/y visuals following along.
// FIX: stored in ~playRout so playback can be stopped with ~playRout.stop;
// FIX: nil-guard the window refresh (crashes if the visualization block
// was never evaluated or the handle was cleared).
~playRout = fork {
  var now = 0.0;
  ~sequence.do { |point|
    (point.time - now).wait;
    now = point.time;
    ~xfunc.value(point.x);
    ~yfunc.value(point.y);
    // GUI refresh must run on the AppClock
    defer { ~win !? { ~win.refresh } };
  };
}
)

looks like this, e.g.:

Thanks for your help. I will check it out in the upcoming days.
In the book about gen~ generating sound and organising time are some really interesting attempts for creating orbits to navigate a multidimensional space in chapter 9.
These have the navigation of wave terrain synthesis in mind but could also be used for navigating any other space. In my case for the MLPRegressor from the FluCoMa toolkit. I will try to build some of those in SC.