diff --git a/docs/docs/video.md b/docs/docs/video.md
index 8ee1bd447c..1e692b8cba 100644
--- a/docs/docs/video.md
+++ b/docs/docs/video.md
@@ -9,10 +9,9 @@ React Native Skia provides a way to load video frames as images, enabling rich m
## Requirements
+- **Reanimated** version 3 or higher.
- **Android:** API level 26 or higher.
- **Video URL:** Must be a local path. We recommend using it in combination with [expo-asset](https://docs.expo.dev/versions/latest/sdk/asset/) to download the video.
-- **Animated Playback:** Available only via [Reanimated 3](/docs/animations/animations) and above.
-- **Sound Playback:** Coming soon. In the meantime, audio can be played using [expo-av](https://docs.expo.dev/versions/latest/sdk/av/).
## Example
diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock
index c8532d3fdb..2945186a34 100644
--- a/example/ios/Podfile.lock
+++ b/example/ios/Podfile.lock
@@ -356,6 +356,8 @@ PODS:
- React
- React-callinvoker
- React-Core
+ - react-native-slider (4.4.2):
+ - React-Core
- React-perflogger (0.71.7)
- React-RCTActionSheet (0.71.7):
- React-Core/RCTActionSheetHeaders (= 0.71.7)
@@ -511,6 +513,7 @@ DEPENDENCIES:
- React-logger (from `../node_modules/react-native/ReactCommon/logger`)
- react-native-safe-area-context (from `../node_modules/react-native-safe-area-context`)
- "react-native-skia (from `../node_modules/@shopify/react-native-skia`)"
+ - "react-native-slider (from `../node_modules/@react-native-community/slider`)"
- React-perflogger (from `../node_modules/react-native/ReactCommon/reactperflogger`)
- React-RCTActionSheet (from `../node_modules/react-native/Libraries/ActionSheetIOS`)
- React-RCTAnimation (from `../node_modules/react-native/Libraries/NativeAnimation`)
@@ -607,6 +610,8 @@ EXTERNAL SOURCES:
:path: "../node_modules/react-native-safe-area-context"
react-native-skia:
:path: "../node_modules/@shopify/react-native-skia"
+ react-native-slider:
+ :path: "../node_modules/@react-native-community/slider"
React-perflogger:
:path: "../node_modules/react-native/ReactCommon/reactperflogger"
React-RCTActionSheet:
@@ -687,6 +692,7 @@ SPEC CHECKSUMS:
React-logger: 3f8ebad1be1bf3299d1ab6d7f971802d7395c7ef
react-native-safe-area-context: dfe5aa13bee37a0c7e8059d14f72ffc076d120e9
react-native-skia: c2c416b864962e73d8b9c81f0fa399ee89c8435e
+ react-native-slider: 33b8d190b59d4f67a541061bb91775d53d617d9d
React-perflogger: 2d505bbe298e3b7bacdd9e542b15535be07220f6
React-RCTActionSheet: 0e96e4560bd733c9b37efbf68f5b1a47615892fb
React-RCTAnimation: fd138e26f120371c87e406745a27535e2c8a04ef
diff --git a/example/package.json b/example/package.json
index 73054d43b4..d076c81920 100644
--- a/example/package.json
+++ b/example/package.json
@@ -14,6 +14,7 @@
"android-reverse-tcp": "adb devices | grep '\t' | awk '{print $1}' | sed 's/\\s//g' | xargs -I {} adb -s {} reverse tcp:8081 tcp:8081"
},
"dependencies": {
+ "@react-native-community/slider": "4.4.2",
"@react-navigation/bottom-tabs": "6.5.7",
"@react-navigation/elements": "1.3.6",
"@react-navigation/native": "6.0.13",
diff --git a/example/src/Examples/Video/Video.tsx b/example/src/Examples/Video/Video.tsx
index 1c958fb04d..8528ea2d9b 100644
--- a/example/src/Examples/Video/Video.tsx
+++ b/example/src/Examples/Video/Video.tsx
@@ -4,45 +4,78 @@ import {
ColorMatrix,
Fill,
ImageShader,
+ Text,
+ useFont,
} from "@shopify/react-native-skia";
-import { Pressable, useWindowDimensions } from "react-native";
-import { useSharedValue } from "react-native-reanimated";
+import { Pressable, View, useWindowDimensions } from "react-native";
+import { useDerivedValue, useSharedValue } from "react-native-reanimated";
+import Slider from "@react-native-community/slider";
import { useVideoFromAsset } from "../../components/Animations";
export const Video = () => {
const paused = useSharedValue(false);
+ const seek = useSharedValue(0);
const { width, height } = useWindowDimensions();
- const { currentFrame } = useVideoFromAsset(
+ const fontSize = 20;
+ const font = useFont(require("../../assets/SF-Mono-Semibold.otf"), fontSize);
+ const { currentFrame, currentTime, duration } = useVideoFromAsset(
require("../../Tests/assets/BigBuckBunny.mp4"),
{
paused,
looping: true,
+ seek,
+ volume: 0,
}
);
+ const text = useDerivedValue(() => currentTime.value.toFixed(0));
   return (
-    <Pressable
-      style={{ flex: 1 }}
-      onPress={() => (paused.value = !paused.value)}
-    >
-      <Canvas style={{ flex: 1 }}>
-        <Fill>
-          <ImageShader
-            image={currentFrame}
-            x={0}
-            y={0}
-            width={width}
-            height={height}
-            fit="cover"
-          />
-        </Fill>
-      </Canvas>
-    </Pressable>
+    <View style={{ flex: 1 }}>
+      <Pressable
+        style={{ flex: 1 }}
+        onPress={() => (paused.value = !paused.value)}
+      >
+        <Canvas style={{ flex: 1 }}>
+          <Fill>
+            <ImageShader
+              image={currentFrame}
+              x={0}
+              y={0}
+              width={width}
+              height={height}
+              fit="cover"
+            />
+          </Fill>
+          <Text x={20} y={height - 80} text={text} font={font} color="white" />
+        </Canvas>
+      </Pressable>
+      <Slider
+        style={{ position: "absolute", bottom: 40, left: 20, width: width - 40 }}
+        minimumValue={0}
+        maximumValue={1}
+        onValueChange={(value) => {
+          seek.value = value * duration;
+          paused.value = false;
+        }}
+        onSlidingStart={() => {
+          paused.value = true;
+        }}
+      />
+    </View>
   );
};
diff --git a/example/yarn.lock b/example/yarn.lock
index 53dd8fcb71..00291751a9 100644
--- a/example/yarn.lock
+++ b/example/yarn.lock
@@ -2379,6 +2379,11 @@
resolved "https://registry.yarnpkg.com/@react-native-community/eslint-plugin/-/eslint-plugin-1.3.0.tgz#9e558170c106bbafaa1ef502bd8e6d4651012bf9"
integrity sha512-+zDZ20NUnSWghj7Ku5aFphMzuM9JulqCW+aPXT6IfIXFbb8tzYTTOSeRFOtuekJ99ibW2fUCSsjuKNlwDIbHFg==
+"@react-native-community/slider@4.4.2":
+ version "4.4.2"
+ resolved "https://registry.yarnpkg.com/@react-native-community/slider/-/slider-4.4.2.tgz#1fea0eb3ae31841fe87bd6c4fc67569066e9cf4b"
+ integrity sha512-D9bv+3Vd2gairAhnRPAghwccgEmoM7g562pm8i4qB3Esrms5mggF81G3UvCyc0w3jjtFHh8dpQkfEoKiP0NW/Q==
+
"@react-native/assets@1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@react-native/assets/-/assets-1.0.0.tgz#c6f9bf63d274bafc8e970628de24986b30a55c8e"
diff --git a/package/android/cpp/rnskia-android/RNSkAndroidVideo.cpp b/package/android/cpp/rnskia-android/RNSkAndroidVideo.cpp
index c73bc4d79c..48cd9ea99d 100644
--- a/package/android/cpp/rnskia-android/RNSkAndroidVideo.cpp
+++ b/package/android/cpp/rnskia-android/RNSkAndroidVideo.cpp
@@ -83,7 +83,7 @@ double RNSkAndroidVideo::framerate() {
void RNSkAndroidVideo::seek(double timestamp) {
JNIEnv *env = facebook::jni::Environment::current();
jclass cls = env->GetObjectClass(_jniVideo.get());
- jmethodID mid = env->GetMethodID(cls, "seek", "(J)V");
+ jmethodID mid = env->GetMethodID(cls, "seek", "(D)V");
if (!mid) {
RNSkLogger::logToConsole("seek method not found");
return;
@@ -128,4 +128,36 @@ SkISize RNSkAndroidVideo::getSize() {
return SkISize::Make(width, height);
}
+void RNSkAndroidVideo::play() {
+ JNIEnv *env = facebook::jni::Environment::current();
+ jclass cls = env->GetObjectClass(_jniVideo.get());
+ jmethodID mid = env->GetMethodID(cls, "play", "()V");
+ if (!mid) {
+ RNSkLogger::logToConsole("play method not found");
+ return;
+ }
+ env->CallVoidMethod(_jniVideo.get(), mid);
+}
+
+void RNSkAndroidVideo::pause() {
+ JNIEnv *env = facebook::jni::Environment::current();
+ jclass cls = env->GetObjectClass(_jniVideo.get());
+ jmethodID mid = env->GetMethodID(cls, "pause", "()V");
+ if (!mid) {
+ RNSkLogger::logToConsole("pause method not found");
+ return;
+ }
+ env->CallVoidMethod(_jniVideo.get(), mid);
+}
+
+void RNSkAndroidVideo::setVolume(float volume) {
+ JNIEnv *env = facebook::jni::Environment::current();
+ jclass cls = env->GetObjectClass(_jniVideo.get());
+ jmethodID mid = env->GetMethodID(cls, "setVolume", "(F)V");
+ if (!mid) {
+ RNSkLogger::logToConsole("setVolume method not found");
+ return;
+ }
+ env->CallVoidMethod(_jniVideo.get(), mid, volume);
+}
} // namespace RNSkia
diff --git a/package/android/cpp/rnskia-android/RNSkAndroidVideo.h b/package/android/cpp/rnskia-android/RNSkAndroidVideo.h
index 0d18c47b74..4198d667f4 100644
--- a/package/android/cpp/rnskia-android/RNSkAndroidVideo.h
+++ b/package/android/cpp/rnskia-android/RNSkAndroidVideo.h
@@ -33,6 +33,9 @@ class RNSkAndroidVideo : public RNSkVideo {
void seek(double timestamp) override;
float getRotationInDegrees() override;
SkISize getSize() override;
+ void play() override;
+ void pause() override;
+ void setVolume(float volume) override;
};
} // namespace RNSkia
diff --git a/package/android/src/main/java/com/shopify/reactnative/skia/RNSkVideo.java b/package/android/src/main/java/com/shopify/reactnative/skia/RNSkVideo.java
index 63233123f2..701f1e7347 100644
--- a/package/android/src/main/java/com/shopify/reactnative/skia/RNSkVideo.java
+++ b/package/android/src/main/java/com/shopify/reactnative/skia/RNSkVideo.java
@@ -3,11 +3,15 @@
import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.HardwareBuffer;
-import android.media.Image;
-import android.media.ImageReader;
+import android.media.AudioAttributes;
+import android.media.AudioManager;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
+import android.media.MediaPlayer;
+import android.media.MediaSync;
+import android.media.Image;
+import android.media.ImageReader;
import android.net.Uri;
import android.os.Build;
import android.view.Surface;
@@ -28,12 +32,16 @@ public class RNSkVideo {
private MediaCodec decoder;
private ImageReader imageReader;
private Surface outputSurface;
+ private MediaPlayer mediaPlayer;
+ private MediaSync mediaSync;
private double durationMs;
private double frameRate;
private int rotationDegrees = 0;
private int width = 0;
private int height = 0;
+ private boolean isPlaying = false;
+
RNSkVideo(Context context, String localUri) {
this.uri = Uri.parse(localUri);
this.context = context;
@@ -50,6 +58,18 @@ private void initializeReader() {
}
extractor.selectTrack(trackIndex);
MediaFormat format = extractor.getTrackFormat(trackIndex);
+
+ // Initialize MediaPlayer
+ mediaPlayer = new MediaPlayer();
+ mediaPlayer.setDataSource(context, uri);
+ mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
+ mediaPlayer.setOnPreparedListener(mp -> {
+ durationMs = mp.getDuration();
+ mp.start();
+ isPlaying = true;
+ });
+ mediaPlayer.prepareAsync();
+
// Retrieve and store video properties
if (format.containsKey(MediaFormat.KEY_DURATION)) {
durationMs = format.getLong(MediaFormat.KEY_DURATION) / 1000; // Convert microseconds to milliseconds
@@ -119,12 +139,30 @@ public HardwareBuffer nextImage() {
}
@DoNotStrip
- public void seek(long timestamp) {
- // Seek to the closest sync frame at or before the specified time
- extractor.seekTo(timestamp * 1000, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
+ public void seek(double timestamp) {
+ // Log the values for debugging
+
+ long timestampUs = (long)(timestamp * 1000); // Convert milliseconds to microseconds
+
+ extractor.seekTo(timestampUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+ if (mediaPlayer != null) {
+ int timestampMs = (int) timestamp; // Convert to milliseconds
+ mediaPlayer.seekTo(timestampMs, MediaPlayer.SEEK_CLOSEST);
+ }
+
// Flush the codec to reset internal state and buffers
if (decoder != null) {
decoder.flush();
+
+ // Decode frames until reaching the exact timestamp
+ boolean isSeeking = true;
+ while (isSeeking) {
+ decodeFrame();
+ long currentTimestampUs = extractor.getSampleTime();
+ if (currentTimestampUs >= timestampUs) {
+ isSeeking = false;
+ }
+ }
}
}
@@ -187,7 +225,34 @@ private void decodeFrame() {
}
}
+ @DoNotStrip
+ public void play() {
+ if (mediaPlayer != null && !isPlaying) {
+ mediaPlayer.start();
+ isPlaying = true;
+ }
+ }
+
+ @DoNotStrip
+ public void pause() {
+ if (mediaPlayer != null && isPlaying) {
+ mediaPlayer.pause();
+ isPlaying = false;
+ }
+ }
+
+ @DoNotStrip
+ public void setVolume(float volume) {
+ if (mediaPlayer != null) {
+ mediaPlayer.setVolume(volume, volume);
+ }
+ }
+
public void release() {
+ if (mediaPlayer != null) {
+ mediaPlayer.release();
+ mediaPlayer = null;
+ }
if (decoder != null) {
decoder.stop();
decoder.release();
diff --git a/package/cpp/api/JsiVideo.h b/package/cpp/api/JsiVideo.h
index da0dcad8c5..6dbca1ef69 100644
--- a/package/cpp/api/JsiVideo.h
+++ b/package/cpp/api/JsiVideo.h
 class JsiVideo : public JsiSkWrappingSharedPtrHostObject<RNSkVideo> {
return result;
}
- JSI_EXPORT_FUNCTIONS(JSI_EXPORT_FUNC(JsiVideo, nextImage),
- JSI_EXPORT_FUNC(JsiVideo, duration),
- JSI_EXPORT_FUNC(JsiVideo, framerate),
- JSI_EXPORT_FUNC(JsiVideo, seek),
- JSI_EXPORT_FUNC(JsiVideo, rotation),
- JSI_EXPORT_FUNC(JsiVideo, size),
- JSI_EXPORT_FUNC(JsiVideo, dispose))
+ JSI_HOST_FUNCTION(play) {
+ getObject()->play();
+ return jsi::Value::undefined();
+ }
+
+ JSI_HOST_FUNCTION(pause) {
+ getObject()->pause();
+ return jsi::Value::undefined();
+ }
+
+ JSI_HOST_FUNCTION(setVolume) {
+ auto volume = arguments[0].asNumber();
+    getObject()->setVolume(static_cast<float>(volume));
+ return jsi::Value::undefined();
+ }
+
+ JSI_EXPORT_FUNCTIONS(
+ JSI_EXPORT_FUNC(JsiVideo, nextImage), JSI_EXPORT_FUNC(JsiVideo, duration),
+ JSI_EXPORT_FUNC(JsiVideo, framerate), JSI_EXPORT_FUNC(JsiVideo, seek),
+ JSI_EXPORT_FUNC(JsiVideo, rotation), JSI_EXPORT_FUNC(JsiVideo, size),
+ JSI_EXPORT_FUNC(JsiVideo, play), JSI_EXPORT_FUNC(JsiVideo, pause),
+ JSI_EXPORT_FUNC(JsiVideo, setVolume), JSI_EXPORT_FUNC(JsiVideo, dispose))
   JsiVideo(std::shared_ptr<RNSkPlatformContext> context,
            std::shared_ptr<RNSkVideo> video)
diff --git a/package/cpp/rnskia/RNSkVideo.h b/package/cpp/rnskia/RNSkVideo.h
index d02d76359e..924906497d 100644
--- a/package/cpp/rnskia/RNSkVideo.h
+++ b/package/cpp/rnskia/RNSkVideo.h
@@ -20,6 +20,9 @@ class RNSkVideo {
virtual void seek(double timestamp) = 0;
virtual float getRotationInDegrees() = 0;
virtual SkISize getSize() = 0;
+ virtual void play() = 0;
+ virtual void pause() = 0;
+ virtual void setVolume(float volume) = 0;
};
} // namespace RNSkia
diff --git a/package/ios/RNSkia-iOS/RNSkiOSVideo.h b/package/ios/RNSkia-iOS/RNSkiOSVideo.h
index 4344c63c86..31ed7287ef 100644
--- a/package/ios/RNSkia-iOS/RNSkiOSVideo.h
+++ b/package/ios/RNSkia-iOS/RNSkiOSVideo.h
@@ -21,16 +21,18 @@ namespace RNSkia {
class RNSkiOSVideo : public RNSkVideo {
private:
std::string _url;
- AVAssetReader *_reader = nullptr;
- AVAssetReaderTrackOutput *_trackOutput = nullptr;
+ AVPlayer *_player = nullptr;
+ AVPlayerItem *_playerItem = nullptr;
+ AVPlayerItemVideoOutput *_videoOutput = nullptr;
RNSkPlatformContext *_context;
double _duration = 0;
double _framerate = 0;
float _videoWidth = 0;
float _videoHeight = 0;
- void setupReader(CMTimeRange timeRange);
- NSDictionary *getOutputSettings();
CGAffineTransform _preferredTransform;
+ bool _isPlaying = false;
+ void setupPlayer();
+ NSDictionary *getOutputSettings();
public:
RNSkiOSVideo(std::string url, RNSkPlatformContext *context);
@@ -39,8 +41,11 @@ class RNSkiOSVideo : public RNSkVideo {
double duration() override;
double framerate() override;
void seek(double timestamp) override;
+ void play();
+ void pause();
float getRotationInDegrees() override;
SkISize getSize() override;
+ void setVolume(float volume);
};
} // namespace RNSkia
diff --git a/package/ios/RNSkia-iOS/RNSkiOSVideo.mm b/package/ios/RNSkia-iOS/RNSkiOSVideo.mm
index b18f199566..ed47544e3c 100644
--- a/package/ios/RNSkia-iOS/RNSkiOSVideo.mm
+++ b/package/ios/RNSkia-iOS/RNSkiOSVideo.mm
@@ -16,80 +16,60 @@
RNSkiOSVideo::RNSkiOSVideo(std::string url, RNSkPlatformContext *context)
: _url(std::move(url)), _context(context) {
- setupReader(CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity));
+ setupPlayer();
}
-RNSkiOSVideo::~RNSkiOSVideo() {}
+RNSkiOSVideo::~RNSkiOSVideo() {
+ if (_player) {
+ [_player pause];
+ }
+}
-void RNSkiOSVideo::setupReader(CMTimeRange timeRange) {
- NSError *error = nil;
+void RNSkiOSVideo::setupPlayer() {
+ NSURL *videoURL = [NSURL URLWithString:@(_url.c_str())];
+ AVPlayerItem *playerItem = [AVPlayerItem playerItemWithURL:videoURL];
+ _player = [AVPlayer playerWithPlayerItem:playerItem];
+ _playerItem = playerItem;
- AVURLAsset *asset =
- [AVURLAsset URLAssetWithURL:[NSURL URLWithString:@(_url.c_str())]
- options:nil];
- AVAssetReader *assetReader = [[AVAssetReader alloc] initWithAsset:asset
- error:&error];
- if (error) {
- NSLog(@"Error initializing asset reader: %@", error.localizedDescription);
- return;
- }
+ NSDictionary *outputSettings = getOutputSettings();
+ _videoOutput =
+ [[AVPlayerItemVideoOutput alloc] initWithOutputSettings:outputSettings];
+ [playerItem addOutput:_videoOutput];
- CMTime time = [asset duration];
- if (time.timescale == 0) {
- NSLog(@"Error: Timescale of the asset is zero.");
- return;
+ CMTime time = playerItem.asset.duration;
+ if (time.timescale != 0) {
+ _duration = CMTimeGetSeconds(time) * 1000; // Store duration in milliseconds
}
- _duration = CMTimeGetSeconds(time) * 1000; // Store duration in milliseconds
AVAssetTrack *videoTrack =
- [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
- _framerate = videoTrack.nominalFrameRate;
- _preferredTransform = videoTrack.preferredTransform;
- CGSize videoSize = videoTrack.naturalSize;
- _videoWidth = videoSize.width;
- _videoHeight = videoSize.height;
- NSDictionary *outputSettings = getOutputSettings();
- AVAssetReaderTrackOutput *trackOutput =
- [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack
- outputSettings:outputSettings];
-
- assetReader.timeRange = timeRange;
- if ([assetReader canAddOutput:trackOutput]) {
- [assetReader addOutput:trackOutput];
- [assetReader startReading];
- } else {
- NSLog(@"Cannot add output to asset reader.");
- return;
+ [[playerItem.asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
+ if (videoTrack) {
+ _framerate = videoTrack.nominalFrameRate;
+ _preferredTransform = videoTrack.preferredTransform;
+ CGSize videoSize = videoTrack.naturalSize;
+ _videoWidth = videoSize.width;
+ _videoHeight = videoSize.height;
}
-
- _reader = assetReader;
- _trackOutput = trackOutput;
+ play();
}
 sk_sp<SkImage> RNSkiOSVideo::nextImage(double *timeStamp) {
- CMSampleBufferRef sampleBuffer = [_trackOutput copyNextSampleBuffer];
- if (!sampleBuffer) {
- NSLog(@"No sample buffer.");
- return nullptr;
- }
-
- // Extract the pixel buffer from the sample buffer
- CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ CMTime currentTime = [_player currentTime];
+ CVPixelBufferRef pixelBuffer =
+ [_videoOutput copyPixelBufferForItemTime:currentTime
+ itemTimeForDisplay:nullptr];
if (!pixelBuffer) {
NSLog(@"No pixel buffer.");
- CFRelease(sampleBuffer);
return nullptr;
}
- auto skImage = _context->makeImageFromNativeBuffer(
-      reinterpret_cast<void *>(pixelBuffer));
+ auto skImage = _context->makeImageFromNativeBuffer((void *)pixelBuffer);
if (timeStamp) {
- CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
- *timeStamp = CMTimeGetSeconds(time);
+ *timeStamp = CMTimeGetSeconds(currentTime);
}
- CFRelease(sampleBuffer);
+ CVPixelBufferRelease(pixelBuffer);
return skImage;
}
@@ -118,16 +98,30 @@
}
void RNSkiOSVideo::seek(double timeInMilliseconds) {
- if (_reader) {
- [_reader cancelReading];
- _reader = nil;
- _trackOutput = nil;
+ CMTime seekTime =
+ CMTimeMakeWithSeconds(timeInMilliseconds / 1000.0, NSEC_PER_SEC);
+ [_player seekToTime:seekTime
+ toleranceBefore:kCMTimeZero
+ toleranceAfter:kCMTimeZero
+ completionHandler:^(BOOL finished) {
+ if (!finished) {
+ NSLog(@"Seek failed or was interrupted.");
+ }
+ }];
+}
+
+void RNSkiOSVideo::play() {
+ if (_player) {
+ [_player play];
+ _isPlaying = true;
}
+}
- CMTime startTime =
- CMTimeMakeWithSeconds(timeInMilliseconds / 1000.0, NSEC_PER_SEC);
- CMTimeRange timeRange = CMTimeRangeMake(startTime, kCMTimePositiveInfinity);
- setupReader(timeRange);
+void RNSkiOSVideo::pause() {
+ if (_player) {
+ [_player pause];
+ _isPlaying = false;
+ }
}
double RNSkiOSVideo::duration() { return _duration; }
@@ -138,4 +132,6 @@
return SkISize::Make(_videoWidth, _videoHeight);
}
+void RNSkiOSVideo::setVolume(float volume) { _player.volume = volume; }
+
} // namespace RNSkia
diff --git a/package/src/external/reanimated/useVideo.ts b/package/src/external/reanimated/useVideo.ts
index 71aac12040..4e46a6c3e9 100644
--- a/package/src/external/reanimated/useVideo.ts
+++ b/package/src/external/reanimated/useVideo.ts
@@ -1,15 +1,48 @@
-import { type FrameInfo } from "react-native-reanimated";
+import type { SharedValue, FrameInfo } from "react-native-reanimated";
import { useEffect, useMemo } from "react";
import { Skia } from "../../skia/Skia";
import type { SkImage, Video } from "../../skia/types";
+import { Platform } from "../../Platform";
import Rea from "./ReanimatedProxy";
-import {
- processVideoState,
- type Animated,
- type PlaybackOptions,
-} from "./video";
+
+export type Animated<T> = SharedValue<T> | T;
+// TODO: Move to useVideo.ts
+export interface PlaybackOptions {
+  playbackSpeed: Animated<number>;
+  looping: Animated<boolean>;
+  paused: Animated<boolean>;
+  seek: Animated<number | null>;
+  volume: Animated<number>;
+}
+
+type Materialized<T> = {
+  [K in keyof T]: T[K] extends Animated<infer U> ? U : T[K];
+};
+
+export type MaterializedPlaybackOptions = Materialized<
+  Omit<PlaybackOptions, "seek">
+>;
+
+// TODO: move
+export const setFrame = (
+  video: Video,
+  currentFrame: SharedValue<SkImage | null>
+) => {
+ "worklet";
+ const img = video.nextImage();
+ if (img) {
+ if (currentFrame.value) {
+ currentFrame.value.dispose();
+ }
+ if (Platform.OS === "android") {
+ currentFrame.value = img.makeNonTextureImage();
+ } else {
+ currentFrame.value = img;
+ }
+ }
+};
const defaultOptions = {
playbackSpeed: 1,
@@ -17,6 +50,7 @@ const defaultOptions = {
paused: false,
seek: null,
currentTime: 0,
+ volume: 0,
};
 const useOption = <T>(value: Animated<T>) => {
@@ -41,6 +75,7 @@ export const useVideo = (
const isPaused = useOption(userOptions?.paused ?? defaultOptions.paused);
const looping = useOption(userOptions?.looping ?? defaultOptions.looping);
const seek = useOption(userOptions?.seek ?? defaultOptions.seek);
+ const volume = useOption(userOptions?.volume ?? defaultOptions.volume);
const playbackSpeed = useOption(
userOptions?.playbackSpeed ?? defaultOptions.playbackSpeed
);
@@ -51,22 +86,62 @@ export const useVideo = (
const framerate = useMemo(() => video?.framerate() ?? 0, [video]);
const size = useMemo(() => video?.size() ?? { width: 0, height: 0 }, [video]);
const rotation = useMemo(() => video?.rotation() ?? 0, [video]);
+ Rea.useAnimatedReaction(
+ () => isPaused.value,
+ (paused) => {
+ if (paused) {
+ video?.pause();
+ } else {
+ lastTimestamp.value = -1;
+ video?.play();
+ }
+ }
+ );
+ Rea.useAnimatedReaction(
+ () => seek.value,
+ (value) => {
+ if (value !== null) {
+ video?.seek(value);
+ currentTime.value = value;
+ seek.value = null;
+ }
+ }
+ );
+ Rea.useAnimatedReaction(
+ () => volume.value,
+ (value) => {
+ video?.setVolume(value);
+ }
+ );
Rea.useFrameCallback((frameInfo: FrameInfo) => {
- processVideoState(
- video,
- duration,
- framerate,
- frameInfo.timestamp,
- {
- paused: isPaused.value,
- looping: looping.value,
- playbackSpeed: playbackSpeed.value,
- },
- currentTime,
- currentFrame,
- lastTimestamp,
- seek
+ "worklet";
+ if (!video) {
+ return;
+ }
+ if (isPaused.value) {
+ return;
+ }
+ const currentTimestamp = frameInfo.timestamp;
+ if (lastTimestamp.value === -1) {
+ lastTimestamp.value = currentTimestamp;
+ }
+ const delta = currentTimestamp - lastTimestamp.value;
+
+ const frameDuration = 1000 / framerate;
+ const currentFrameDuration = Math.floor(
+ frameDuration / playbackSpeed.value
);
+ const isOver = currentTime.value + delta > duration;
+ if (isOver && looping.value) {
+ seek.value = 0;
+ currentTime.value = seek.value;
+ lastTimestamp.value = currentTimestamp;
+ }
+ if (delta >= currentFrameDuration && !isOver) {
+ setFrame(video, currentFrame);
+ currentTime.value += delta;
+ lastTimestamp.value = currentTimestamp;
+ }
});
useEffect(() => {
diff --git a/package/src/external/reanimated/video.ts b/package/src/external/reanimated/video.ts
deleted file mode 100644
index 9720d8aa83..0000000000
--- a/package/src/external/reanimated/video.ts
+++ /dev/null
@@ -1,82 +0,0 @@
-import type { SharedValue } from "react-native-reanimated";
-
-import type { SkImage, Video } from "../../skia/types";
-import { Platform } from "../../Platform";
-
-export type Animated<T> = SharedValue<T> | T;
-
-export interface PlaybackOptions {
-  playbackSpeed: Animated<number>;
-  looping: Animated<boolean>;
-  paused: Animated<boolean>;
-  seek: Animated<number | null>;
-}
-
-type Materialized<T> = {
-  [K in keyof T]: T[K] extends Animated<infer U> ? U : T[K];
-};
-
-export type MaterializedPlaybackOptions = Materialized<
-  Omit<PlaybackOptions, "seek">
->;
-
-export const setFrame = (
-  video: Video,
-  currentFrame: SharedValue<SkImage | null>
-) => {
- "worklet";
- const img = video.nextImage();
- if (img) {
- if (currentFrame.value) {
- currentFrame.value.dispose();
- }
- if (Platform.OS === "android") {
- currentFrame.value = img.makeNonTextureImage();
- } else {
- currentFrame.value = img;
- }
- }
-};
-
-export const processVideoState = (
-  video: Video | null,
-  duration: number,
-  framerate: number,
-  currentTimestamp: number,
-  options: Materialized<Omit<PlaybackOptions, "seek">>,
-  currentTime: SharedValue<number>,
-  currentFrame: SharedValue<SkImage | null>,
-  lastTimestamp: SharedValue<number>,
-  seek: SharedValue<number | null>
-) => {
- "worklet";
- if (!video) {
- return;
- }
- if (options.paused) {
- return;
- }
- const delta = currentTimestamp - lastTimestamp.value;
-
- const frameDuration = 1000 / framerate;
- const currentFrameDuration = Math.floor(
- frameDuration / options.playbackSpeed
- );
- if (currentTime.value + delta >= duration && options.looping) {
- seek.value = 0;
- }
- if (seek.value !== null) {
- video.seek(seek.value);
- currentTime.value = seek.value;
- setFrame(video, currentFrame);
- lastTimestamp.value = currentTimestamp;
- seek.value = null;
- return;
- }
-
- if (delta >= currentFrameDuration) {
- setFrame(video, currentFrame);
- currentTime.value += delta;
- lastTimestamp.value = currentTimestamp;
- }
-};
diff --git a/package/src/renderer/__tests__/Video.spec.tsx b/package/src/renderer/__tests__/Video.spec.tsx
deleted file mode 100644
index ff2b086d1f..0000000000
--- a/package/src/renderer/__tests__/Video.spec.tsx
+++ /dev/null
@@ -1,166 +0,0 @@
-import type { SharedValue } from "react-native-reanimated";
-
-import type { SkImage, Video } from "../../skia/types";
-import {
- processVideoState,
- type MaterializedPlaybackOptions,
-} from "../../external/reanimated/video";
-
-const createValue = <T,>(value: T) => ({ value } as unknown as SharedValue<T>);
-
-jest.mock("../../Platform", () => ({
- Platform: {
- OS: "ios",
- },
-}));
-
-// Test cases
-describe("Video Player", () => {
- let mockVideo: Video;
- let options: MaterializedPlaybackOptions;
- let currentTimestamp: number;
-
- const currentTime = createValue(0);
- const currentFrame = createValue(null);
- const lastTimestamp = createValue(0);
- const seek = createValue(null);
- const framerate = 30;
- const duration = 5000;
- beforeEach(() => {
- mockVideo = {
- __typename__: "Video",
- dispose: jest.fn(),
- framerate: jest.fn().mockReturnValue(framerate),
- duration: jest.fn().mockReturnValue(duration),
- seek: jest.fn(),
- nextImage: jest.fn().mockReturnValue({} as SkImage),
- rotation: jest.fn().mockReturnValue(0),
- size: jest.fn().mockReturnValue({ width: 0, height: 0 }),
- };
- options = {
- playbackSpeed: 1,
- looping: false,
- paused: false,
- };
- currentTimestamp = 0;
- currentTime.value = 0;
- currentFrame.value = null;
- lastTimestamp.value = 0;
- });
-
- test("should not update state when paused", () => {
- options.paused = true;
- processVideoState(
- mockVideo,
- duration,
- framerate,
- currentTimestamp,
- options,
- currentTime,
- currentFrame,
- lastTimestamp,
- seek
- );
- expect(currentTime.value).toBe(0);
- expect(currentFrame.value).toBeNull();
- expect(lastTimestamp.value).toBe(0);
- });
-
- test("should update state with next frame if not paused and delta exceeds frame duration", () => {
- currentTimestamp = 100;
- lastTimestamp.value = 0;
- processVideoState(
- mockVideo,
- duration,
- framerate,
- currentTimestamp,
- options,
- currentTime,
- currentFrame,
- lastTimestamp,
- seek
- );
- expect(currentFrame.value).not.toBeNull();
- expect(currentTime.value).toBe(100);
- expect(lastTimestamp.value).toBe(100);
- });
-
- test("should handle looping when current time exceeds video duration", () => {
- currentTimestamp = 5100;
- lastTimestamp.value = 0;
- currentTime.value = 5000;
- options.looping = true;
- processVideoState(
- mockVideo,
- duration,
- framerate,
- currentTimestamp,
- options,
- currentTime,
- currentFrame,
- lastTimestamp,
- seek
- );
- expect(seek.value).toBe(null);
- expect(currentTime.value).toBe(0);
- });
-
- test("should seek to specified time", () => {
- seek.value = 2000;
- processVideoState(
- mockVideo,
- duration,
- framerate,
- currentTimestamp,
- options,
- currentTime,
- currentFrame,
- lastTimestamp,
- seek
- );
- expect(mockVideo.seek).toHaveBeenCalledWith(2000);
- expect(currentTime.value).toBe(2000);
- expect(currentFrame.value).not.toBeNull();
- expect(lastTimestamp.value).toBe(currentTimestamp);
- expect(seek.value).toBeNull();
- });
-
- test("should not update frame if delta does not exceed frame duration", () => {
- currentTimestamp = 10;
- lastTimestamp.value = 0;
- processVideoState(
- mockVideo,
- duration,
- framerate,
- currentTimestamp,
- options,
- currentTime,
- currentFrame,
- lastTimestamp,
- seek
- );
- expect(currentFrame.value).toBeNull();
- expect(currentTime.value).toBe(0);
- expect(lastTimestamp.value).toBe(0);
- });
-
- test("should update frame based on playback speed", () => {
- options.playbackSpeed = 2; // double speed
- currentTimestamp = 100;
- lastTimestamp.value = 0;
- processVideoState(
- mockVideo,
- duration,
- framerate,
- currentTimestamp,
- options,
- currentTime,
- currentFrame,
- lastTimestamp,
- seek
- );
- expect(currentFrame.value).not.toBeNull();
- expect(currentTime.value).toBe(100);
- expect(lastTimestamp.value).toBe(100);
- });
-});
diff --git a/package/src/skia/types/Video/Video.ts b/package/src/skia/types/Video/Video.ts
index 7175de8cd8..5a23678d25 100644
--- a/package/src/skia/types/Video/Video.ts
+++ b/package/src/skia/types/Video/Video.ts
@@ -10,4 +10,7 @@ export interface Video extends SkJSIInstance<"Video"> {
seek(time: number): void;
rotation(): VideoRotation;
size(): { width: number; height: number };
+ pause(): void;
+ play(): void;
+ setVolume(volume: number): void;
}