Skip to content

Commit

Permalink
feat: added functions to index and added working example
Browse files Browse the repository at this point in the history
  • Loading branch information
AminAllahham committed Jan 26, 2024
1 parent 5a10a0e commit 94223d1
Show file tree
Hide file tree
Showing 5 changed files with 196 additions and 32 deletions.
2 changes: 2 additions & 0 deletions android/src/main/AndroidManifest.xml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.liveaudiovisualizer">
<uses-permission android:name="android.permission.RECORD_AUDIO" />

</manifest>
Original file line number Diff line number Diff line change
Expand Up @@ -14,21 +14,37 @@
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.module.annotations.ReactModule;
import com.facebook.react.modules.core.DeviceEventManagerModule;

// Arrays
import java.util.Arrays;
import java.util.ArrayList;
import java.util.List;

import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import android.util.Log;
import java.util.Collections;

@ReactModule(name = LiveAudioVisualizerModule.NAME)
public class LiveAudioVisualizerModule extends ReactContextBaseJavaModule {
public static final String NAME = "LiveAudioVisualizer";
public static final String TAG = "LiveAudioVisualizer";
private static final String EVENT_AUDIO_STARTED = "audioStarted";
private static final String EVENT_VISUALIZATION_CHANGED = "VisualizationChanged";

private long lastUpdateTime = 0;
private static final double SECONDS_PER_UPDATE = 0.1;
private AudioRecord audioRecord;
private boolean isListening = false;
private List<Integer> audioDataList = new ArrayList<>();
private List<Double> audioDataList = Collections.synchronizedList(new ArrayList<>());
private Handler handler = new Handler(Looper.getMainLooper());
private static final int ITEMS_PER_UPDATE = 10;

// last 6 items before the current item
private static final int HISTORY_SIZE = 6;
private double[] history = new double[HISTORY_SIZE];
private int historyIndex = 0;



public LiveAudioVisualizerModule(ReactApplicationContext reactContext) {
super(reactContext);
}
Expand Down Expand Up @@ -86,41 +102,109 @@ private void stopAudioCapture() {
}
}

// Process audio data and send it to React Native
private void processData(short[] buffer) {
// For simplicity, this example just sends the average amplitude of the audio data
int sum = 0;
for (short sample : buffer) {
sum += Math.abs(sample);
}
int averageAmplitude = sum / buffer.length;
Log.d(TAG, "Processing data, buffer length: " + buffer.length);

int sum = 0;

for (short sample : buffer) {
sum += Math.abs(sample);
}

int averageAmplitude = sum / buffer.length;

// Add the normalized amplitude to the list (scaled to the range [0, 1])
double normalizedAmplitude = normalizeAmplitude(averageAmplitude);
audioDataList.add(normalizedAmplitude);

// Add the normalized amplitude to the history array (scaled to the range [0, 1])
history[historyIndex] = normalizedAmplitude;

// Move to the next index in the history array
historyIndex = (historyIndex + 1) % HISTORY_SIZE;

// Send the data to React Native every half second
long currentTime = System.currentTimeMillis();
if (currentTime - lastUpdateTime >= SECONDS_PER_UPDATE * 1000) {
lastUpdateTime = currentTime;

// Synchronize on the list to avoid race conditions
synchronized (audioDataList) {
// Convert amplitude values to a double array
double[] amplitudeArray = new double[audioDataList.size() + HISTORY_SIZE];

// Add the last 6 items from history to amplitudeArray
int historyStartIndex = (historyIndex + 1) % HISTORY_SIZE;
for (int i = 0; i < HISTORY_SIZE; i++) {
amplitudeArray[i] = history[(historyStartIndex + i) % HISTORY_SIZE];
}

// Add the items from audioDataList to amplitudeArray
for (int i = 0; i < audioDataList.size(); i++) {
amplitudeArray[HISTORY_SIZE + i] = audioDataList.get(i);
}

// Send the amplitude array to React Native
handler.post(() -> {
Log.d(TAG, "Sending event");
sendVisualizationEvent(amplitudeArray);
});

// Clear amplitude values for the next half second
audioDataList.clear();
}
}
}

// Normalize amplitude to the range [0, 1]
private double normalizeAmplitude(int amplitude) {
// Choose a suitable range based on the expected maximum amplitude
int maxAmplitude = 4000; // Adjust this based on your specific use case

// Add the average amplitude to the list
audioDataList.add(averageAmplitude);
// Ensure the amplitude is within the range [0, maxAmplitude]
amplitude = Math.max(0, Math.min(amplitude, maxAmplitude));

// Send the data to React Native every second
handler.post(() -> {
sendEvent(EVENT_VISUALIZATION_CHANGED, audioDataList.toString());
audioDataList.clear();
});
// Normalize to the range [0, 1]
return (double) amplitude / maxAmplitude;
}

private void sendEvent(String eventName, String data) {

private void sendEvent(String eventName, double[] data) {
WritableArray writableArray = Arguments.createArray();

if (data != null) {
for (double value : data) {
writableArray.pushDouble(value);
}
}

getReactApplicationContext()
.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
.emit(eventName, data);
}
.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
.emit(eventName, writableArray);
}


@ReactMethod
public void setSensitivity(int sensitivity, Promise promise) {
// Implement sensitivity setting logic
promise.resolve();
promise.resolve("Sensitivity set");
}

// Emits one batch of amplitude samples to JS as a "VisualizationChanged" device event.
private void sendVisualizationEvent(double[] data) {
  // Copy the primitive array into a bridge-friendly WritableArray.
  WritableArray payload = Arguments.createArray();
  for (int i = 0; i < data.length; i++) {
    payload.pushDouble(data[i]);
  }

  DeviceEventManagerModule.RCTDeviceEventEmitter emitter =
      getReactApplicationContext()
          .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class);
  emitter.emit(EVENT_VISUALIZATION_CHANGED, payload);
}

@ReactMethod
public void addEventListener(String eventName, Promise promise) {
if (eventName.equals(EVENT_VISUALIZATION_CHANGED)) {
promise.resolve();
promise.resolve("Event listener added");
} else {
promise.reject("INVALID_EVENT", "Invalid event name");
}
Expand Down
1 change: 1 addition & 0 deletions example/android/app/src/main/AndroidManifest.xml
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">

<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />

<application
android:name=".MainApplication"
Expand Down
61 changes: 56 additions & 5 deletions example/src/App.tsx
Original file line number Diff line number Diff line change
@@ -1,31 +1,82 @@
import * as React from 'react';

import { StyleSheet, View, Text } from 'react-native';
import { multiply } from 'react-native-live-audio-visualizer';
import { DeviceEventEmitter, StyleSheet, View } from 'react-native';
import {
RequestAudioPermission,
startAudioListening,
stopAudioListening,
} from 'react-native-live-audio-visualizer';

export default function App() {
const [result, setResult] = React.useState<number | undefined>();
const [result, setResult] = React.useState<number[]>([]);

React.useEffect(() => {
multiply(3, 7).then(setResult);
RequestAudioPermission(
'This app needs audio permission',
'Permission for audio',
'OK'
);

startAudioListening().then(() => {
console.log('Audio listening started');
});

DeviceEventEmitter.addListener('VisualizationChanged', (data) => {
console.log('Event received:', data);

setResult(data);
});

return () => {
stopAudioListening().then(() => {
console.log('Audio listening stopped');
});
};
}, []);

return (
<View style={styles.container}>
<Text>Result: {result}</Text>
<View style={styles.waveContainer}>
{result.map((wave, index) => (
<View
key={index}
style={[
styles.waveItem,
{
height: wave * 100,
},
]}
/>
))}
</View>
</View>
);
}

// Styles for the audio-visualizer demo screen.
const styles = StyleSheet.create({
// Full-screen dark backdrop with content centered both ways.
container: {
backgroundColor: 'black',
flex: 1,
alignItems: 'center',
justifyContent: 'center',
},
// Horizontal row holding one bar per amplitude sample.
waveContainer: {
alignItems: 'center',
justifyContent: 'center',
width: '100%',
flexDirection: 'row',
},
// NOTE(review): 'box' is not referenced in the JSX visible in this change —
// presumably a leftover from the library template; confirm unused, then remove.
box: {
width: 60,
height: 60,
marginVertical: 20,
},
// A single amplitude bar; its height is set inline per sample (wave * 100).
waveItem: {
width: 12,
backgroundColor: '#B692F6',
marginHorizontal: 1, // NOTE(review): right side is overridden by marginRight below — pick one
borderRadius: 4,
minHeight: 10,
marginRight: 8,
},
});
34 changes: 30 additions & 4 deletions src/index.tsx
Original file line number Diff line number Diff line change
@@ -1,7 +1,12 @@
import { NativeModules } from 'react-native';
import { NativeModules, PermissionsAndroid, Platform } from 'react-native';

const LiveAudioVisualizer = NativeModules.LiveAudioVisualizer;

if (!LiveAudioVisualizer) {
throw new Error('LiveAudioVisualizer native module is not available.');
}

export const LiveAudioVisualizerModal = LiveAudioVisualizer;
/**
 * Starts native microphone capture.
 *
 * Amplitude updates are then delivered through the 'VisualizationChanged'
 * device event (see the native module / the example listener).
 *
 * @returns Promise resolved by the native side — presumably once capture has
 *          started; confirm against the native startAudioListening impl.
 */
export function startAudioListening(): Promise<void> {
return LiveAudioVisualizer.startAudioListening();
}
Expand All @@ -14,7 +19,28 @@ export function setSensitivity(sensitivity: number): Promise<void> {
return LiveAudioVisualizer.setSensitivity(sensitivity);
}

export function addEventListener(callback: (data: any) => void): void {
LiveAudioVisualizer.addListener('audioStarted', callback);
LiveAudioVisualizer.addListener('VisualizationChanged', callback);
/**
 * Requests the Android RECORD_AUDIO runtime permission.
 *
 * @param message        Rationale text shown in the permission dialog.
 * @param title          Title of the permission dialog.
 * @param buttonPositive Label for the dialog's confirm button.
 * @returns true when the permission is granted (or no runtime request is
 *          needed on this platform), false when denied or the request fails.
 */
export async function RequestAudioPermission(
  message: string,
  title: string,
  buttonPositive: string
): Promise<boolean> {
  if (Platform.OS !== 'android') {
    // PermissionsAndroid is Android-only; on iOS mic access is governed by
    // NSMicrophoneUsageDescription — presumably prompted at capture time.
    return true;
  }
  try {
    // Use the typed constant rather than the raw permission string.
    const granted = await PermissionsAndroid.request(
      PermissionsAndroid.PERMISSIONS.RECORD_AUDIO,
      {
        title,
        message,
        buttonPositive,
      }
    );
    const ok = granted === PermissionsAndroid.RESULTS.GRANTED;
    if (ok) {
      console.log('Audio permission granted');
    } else {
      console.log('Audio permission denied');
    }
    // Surface the outcome so callers can avoid starting capture when denied
    // (previously the result was swallowed and undefined was returned).
    return ok;
  } catch (err) {
    console.warn(err);
    return false;
  }
}

0 comments on commit 94223d1

Please sign in to comment.