mirror of https://github.com/ninjamuffin99/Funkin.git synced 2025-11-28 15:26:12 +00:00

audioVisualOffset numbers

Cameron Taylor 2024-02-12 22:14:35 -05:00
parent 25766dbde9
commit e759f90147
2 changed files with 43 additions and 20 deletions

Conductor.hx

@@ -217,6 +217,11 @@ class Conductor
    */
   public var inputOffset(get, set):Int;
 
+  /**
+   * An offset set by the user to compensate for audio/visual lag
+   */
+  public var audioVisualOffset(get, set):Int;
+
   function get_inputOffset():Int
   {
     return Save.get().options.inputOffset;
@@ -229,6 +234,18 @@ class Conductor
     return Save.get().options.inputOffset;
   }
 
+  function get_audioVisualOffset():Int
+  {
+    return Save.get().options.audioVisualOffset;
+  }
+
+  function set_audioVisualOffset(value:Int):Int
+  {
+    Save.get().options.audioVisualOffset = value;
+    Save.get().flush();
+    return Save.get().options.audioVisualOffset;
+  }
+
   /**
    * The number of beats in a measure. May be fractional depending on the time signature.
    */
@@ -283,16 +300,17 @@ class Conductor
    *
    * @param songPosition The current position in the song in milliseconds.
    *                     Leave blank to use the FlxG.sound.music position.
+   * @param applyOffsets If it should apply the instrumentalOffset + formatOffset + audioVisualOffset
    */
-  public function update(?songPos:Float, applyOffsets:Bool = true)
+  public function update(?songPos:Float, applyOffsets:Bool = true, forceDispatch:Bool = false)
   {
     if (songPos == null)
     {
       // Take into account instrumental and file format song offsets.
-      songPos = (FlxG.sound.music != null) ? (FlxG.sound.music.time + instrumentalOffset + formatOffset) : 0.0;
+      songPos = (FlxG.sound.music != null) ? (FlxG.sound.music.time + instrumentalOffset + formatOffset + audioVisualOffset) : 0.0;
     }
     else
-      songPos += applyOffsets ? instrumentalOffset + formatOffset : 0;
+      songPos += applyOffsets ? instrumentalOffset + formatOffset + audioVisualOffset : 0;
 
     var oldMeasure = this.currentMeasure;
     var oldBeat = this.currentBeat;
@@ -338,7 +356,7 @@ class Conductor
     }
 
     // Only fire the signal if we are THE Conductor.
-    if (this == Conductor.instance)
+    if (this == Conductor.instance || forceDispatch)
     {
       // FlxSignals are really cool.
       if (currentStep != oldStep)
@@ -428,7 +446,7 @@ class Conductor
     }
 
     // Update currentStepTime
-    this.update(Conductor.instance.songPosition);
+    this.update(this.songPosition, false);
   }
 
   /**
@@ -540,13 +558,18 @@ class Conductor
     }
   }
 
-  public static function watchQuick():Void
+  /**
+   * @param conductorToUse defaults to Conductor.instance if null
+   */
+  public static function watchQuick(?conductorToUse:Conductor):Void
   {
-    FlxG.watch.addQuick("songPosition", Conductor.instance.songPosition);
-    FlxG.watch.addQuick("bpm", Conductor.instance.bpm);
-    FlxG.watch.addQuick("currentMeasureTime", Conductor.instance.currentMeasureTime);
-    FlxG.watch.addQuick("currentBeatTime", Conductor.instance.currentBeatTime);
-    FlxG.watch.addQuick("currentStepTime", Conductor.instance.currentStepTime);
+    if (conductorToUse == null) conductorToUse = Conductor.instance;
+
+    FlxG.watch.addQuick("songPosition", conductorToUse.songPosition);
+    FlxG.watch.addQuick("bpm", conductorToUse.bpm);
+    FlxG.watch.addQuick("currentMeasureTime", conductorToUse.currentMeasureTime);
+    FlxG.watch.addQuick("currentBeatTime", conductorToUse.currentBeatTime);
+    FlxG.watch.addQuick("currentStepTime", conductorToUse.currentStepTime);
   }
 
   /**
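For orientation (not part of the commit), a minimal sketch of how the additions above might be driven, assuming a secondary Conductor instance named myConductor and a hypothetical songTimeMs value; only the update()/watchQuick() signatures and audioVisualOffset come from this diff.

// myConductor and songTimeMs are illustrative assumptions, not code from the commit.
// applyOffsets = false skips instrumentalOffset + formatOffset + audioVisualOffset;
// forceDispatch = true fires the step/beat/measure signals even though
// myConductor is not Conductor.instance.
myConductor.update(songTimeMs, false, true);

// Persisted through Save by the new setter above.
myConductor.audioVisualOffset = 25;

// Watch this conductor's timing values instead of Conductor.instance.
Conductor.watchQuick(myConductor);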

LatencyState.hx

@@ -230,15 +230,15 @@ class LatencyState extends MusicBeatSubState
       trace(FlxG.sound.music._channel.position);
      */
 
-    localConductor.update(swagSong.time, false);
+    localConductor.update(swagSong.time, false, true);
     // localConductor.songPosition += (Timer.stamp() * 1000) - FlxG.sound.music.prevTimestamp;
 
     songPosVis.x = songPosToX(localConductor.songPosition);
-    songVisFollowAudio.x = songPosToX(localConductor.songPosition - localConductor.instrumentalOffset);
+    songVisFollowAudio.x = songPosToX(localConductor.songPosition - localConductor.audioVisualOffset);
     songVisFollowVideo.x = songPosToX(localConductor.songPosition - localConductor.inputOffset);
 
-    visualOffsetText.text = "Visual Offset: " + localConductor.instrumentalOffset + "ms";
+    visualOffsetText.text = "Visual Offset: " + localConductor.audioVisualOffset + "ms";
     visualOffsetText.text += "\nYou can press SPACE+Left/Right to change this value.";
 
     offsetText.text = "INPUT Offset (Left/Right to change): " + localConductor.inputOffset + "ms";
@@ -260,12 +260,12 @@ class LatencyState extends MusicBeatSubState
     {
       if (FlxG.keys.justPressed.RIGHT)
       {
-        localConductor.instrumentalOffset += 1 * multiply;
+        localConductor.audioVisualOffset += 1 * multiply;
       }
 
      if (FlxG.keys.justPressed.LEFT)
       {
-        localConductor.instrumentalOffset -= 1 * multiply;
+        localConductor.audioVisualOffset -= 1 * multiply;
       }
     }
     else
@@ -294,10 +294,10 @@ class LatencyState extends MusicBeatSubState
     // localConductor.update(swagSong.getTimeWithDiff());
 
     var inputLatencyMs:Float = haxe.Int64.toInt(PreciseInputManager.getCurrentTimestamp() - event.timestamp) / 1000.0 / 1000.0;
-    trace("input latency: " + inputLatencyMs + "ms");
-    trace("cur timestamp: " + PreciseInputManager.getCurrentTimestamp() + "ns");
-    trace("event timestamp: " + event.timestamp + "ns");
-    trace("songtime: " + localConductor.getTimeWithDiff(swagSong) + "ms");
+    // trace("input latency: " + inputLatencyMs + "ms");
+    // trace("cur timestamp: " + PreciseInputManager.getCurrentTimestamp() + "ns");
+    // trace("event timestamp: " + event.timestamp + "ns");
+    // trace("songtime: " + localConductor.getTimeWithDiff(swagSong) + "ms");
 
     var closestBeat:Int = Math.round(localConductor.getTimeWithDiff(swagSong) / (localConductor.stepLengthMs * 2)) % diffGrp.members.length;
     var getDiff:Float = localConductor.getTimeWithDiff(swagSong) - (closestBeat * (localConductor.stepLengthMs * 2));
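For intuition (not part of the commit), a worked example of the beat-snapping math in the last two context lines, under assumed values: at 120 BPM, stepLengthMs is 125 ms, so stepLengthMs * 2 is a 250 ms half-beat slot.

// Assumed values, for illustration only: 120 BPM => stepLengthMs = 125 ms.
var slotMs:Float = 125 * 2;                              // 250 ms per half-beat slot
var hitTimeMs:Float = 1230.0;                            // hypothetical getTimeWithDiff() result
var closestBeat:Int = Math.round(hitTimeMs / slotMs);    // round(4.92) = 5
var getDiff:Float = hitTimeMs - (closestBeat * slotMs);  // 1230 - 1250 = -20 ms, i.e. 20 ms early
// In LatencyState, closestBeat is additionally wrapped with % diffGrp.members.length
// so it indexes one of the on-screen beat markers.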