我已成功将 Superpowered SDK CrossExample 项目导入到 android studio 中,并在 Samsung galaxy S3 和模拟器上对其进行了测试。现在我正在实现一个录制选项,以使用 SuperpoweredRecorder.h 录制应用程序播放的音频。我需要有关如何正确设置 *tempPath 和 *destinationPath 变量以成功保存记录的帮助。
该项目构建正常,但当我尝试在 Galaxy S3 或模拟器上运行该应用程序时,我收到以下错误消息:
19565-19565/com.superpowered.crossexample A/libc:0x006f0070 (code=1) 处的致命信号 11 (SIGSEGV),线程 19565 (ed.crossexample)
添加 *tempPath 和 *destinationPath 指针后发生此错误,因此我相信在成功设置记录路径后此错误将消失。
SuperpoweredRecorder.h 文档的链接:http://superpowered.com/docs/class_superpowered_recorder.html
在查看文档时,我还必须向代码中添加什么才能使用 tempPath 和 destinationPath 变量?
我自己添加的代码位于成对的双斜杠注释（// … //）之间。
SuperpoweredExample.h
#ifndef Header_SuperpoweredExample
#define Header_SuperpoweredExample
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <math.h>
#include <pthread.h>
// NOTE(review): this header includes itself; the include guard makes it harmless,
// but the line is redundant and should be removed (it belongs in the .cpp).
#include "SuperpoweredExample.h"
#include "SuperpoweredAdvancedAudioPlayer.h"
#include "SuperpoweredFilter.h"
#include "SuperpoweredRoll.h"
#include "SuperpoweredFlanger.h"
#include "SuperpoweredMixer.h"
#include "SuperpoweredRecorder.h"
// Number of output buffers cycled through the OpenSL ES buffer queue.
#define NUM_BUFFERS 2
#define HEADROOM_DECIBEL 3.0f
// Linear per-deck gain leaving HEADROOM_DECIBEL of headroom.
// NOTE(review): the exponent uses 0.025 (= 1/40), half of the usual 1/20 dB-to-linear
// conversion — presumably deliberate because two decks are summed; confirm.
static const float headroom = powf(10.0f, -HEADROOM_DECIBEL * 0.025);
// Native side of the CrossExample app: two beat-synced players mixed through a
// crossfader and an fx chain, rendered via an OpenSL ES buffer queue, with an
// added SuperpoweredRecorder for capturing the mix.
class SuperpoweredExample {
public:
// path: APK path; params: [offsetA, lengthA, offsetB, lengthB, samplerate, buffersize].
SuperpoweredExample(const char *path, int *params);
~SuperpoweredExample();
// Audio rendering callback; runs on the OpenSL ES buffer queue thread.
void process(SLAndroidSimpleBufferQueueItf caller);
void onPlayPause(bool play);
void onCrossfader(int value);
void onFxSelect(int value);
void onFxOff();
void onFxValue(int value);
// Starts (record == true) or stops (record == false) recording the mix.
void onRecord(bool record);
private:
SLObjectItf openSLEngine, outputMix, bufferPlayer;
SLAndroidSimpleBufferQueueItf bufferQueue;
SuperpoweredAdvancedAudioPlayer *playerA, *playerB;
SuperpoweredRoll *roll;
SuperpoweredFilter *filter;
SuperpoweredFlanger *flanger;
SuperpoweredStereoMixer *mixer;
SuperpoweredRecorder *recorder;
// NOTE(review): nothing in this file ever assigns these two pointers before the
// constructor hands tempPath to SuperpoweredRecorder — reading an uninitialized
// pointer there is the likely cause of the reported SIGSEGV. They must be set to
// valid, writable file paths before the recorder is created/started.
const char *tempPath;
const char *destinationPath;
unsigned char activeFx;
float crossValue, volA, volB;
pthread_mutex_t mutex;
float *outputBuffer[NUM_BUFFERS];
int currentBuffer, buffersize;
};
#endif
SuperpoweredExample.cpp
#include "SuperpoweredExample.h"
#include <jni.h>
#include <stdlib.h>
#include <stdio.h>
#include <android/log.h>
static void playerEventCallbackA(void *clientData, SuperpoweredAdvancedAudioPlayerEvent event, void *value) {
if (event == SuperpoweredAdvancedAudioPlayerEvent_LoadSuccess) {
SuperpoweredAdvancedAudioPlayer *playerA = * ((SuperpoweredAdvancedAudioPlayer **)clientData);
playerA->setBpm(126.0f);
playerA->setFirstBeatMs(353);
playerA->setPosition(playerA->firstBeatMs, false, false);
};
}
static void playerEventCallbackB(void *clientData, SuperpoweredAdvancedAudioPlayerEvent event, void *value) {
if (event == SuperpoweredAdvancedAudioPlayerEvent_LoadSuccess) {
SuperpoweredAdvancedAudioPlayer *playerB = *((SuperpoweredAdvancedAudioPlayer **)clientData);
playerB->setBpm(123.0f);
playerB->setFirstBeatMs(40);
playerB->setPosition(playerB->firstBeatMs, false, false);
};
}
// OpenSL ES buffer queue callback: forwards the render request to the instance
// registered as the callback context.
static void openSLESCallback(SLAndroidSimpleBufferQueueItf caller, void *pContext) {
    SuperpoweredExample *instance = (SuperpoweredExample *)pContext;
    instance->process(caller);
}
static const SLboolean requireds[2] = { SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
// Builds the whole native audio chain: two beat-synced players, the fx units, the
// recorder, and the OpenSL ES engine/output/buffer-queue player.
// path: APK path; params: [offsetA, lengthA, offsetB, lengthB, samplerate, buffersize].
SuperpoweredExample::SuperpoweredExample(const char *path, int *params) : currentBuffer(0), buffersize(params[5]), activeFx(0), crossValue(0.0f), volB(0.0f), volA(1.0f * headroom) {
    pthread_mutex_init(&mutex, NULL); // This will keep our player volumes and playback states in sync.
    for (int n = 0; n < NUM_BUFFERS; n++) outputBuffer[n] = (float *)memalign(16, (buffersize + 16) * sizeof(float) * 2);
    unsigned int samplerate = params[4];
    playerA = new SuperpoweredAdvancedAudioPlayer(&playerA, playerEventCallbackA, samplerate, 0);
    playerA->open(path, params[0], params[1]);
    playerB = new SuperpoweredAdvancedAudioPlayer(&playerB, playerEventCallbackB, samplerate, 0);
    playerB->open(path, params[2], params[3]);
    playerA->syncMode = playerB->syncMode = SuperpoweredAdvancedAudioPlayerSyncMode_TempoAndBeat;
    roll = new SuperpoweredRoll(samplerate);
    filter = new SuperpoweredFilter(SuperpoweredFilter_Resonant_Lowpass, samplerate);
    flanger = new SuperpoweredFlanger(samplerate);
    mixer = new SuperpoweredStereoMixer();
    // FIX: tempPath and destinationPath were never assigned, so the recorder was
    // constructed from an uninitialized pointer — the cause of the reported SIGSEGV.
    // Both must point to files in a directory that exists and is writable, and the app
    // must declare the WRITE_EXTERNAL_STORAGE permission in its manifest.
    // TODO(review): pass a path obtained from Java (Context.getExternalFilesDir())
    // through JNI instead of hard-coding /sdcard.
    tempPath = "/sdcard/superpowered_temp.wav";
    destinationPath = "/sdcard/superpowered_recording.wav";
    recorder = new SuperpoweredRecorder(tempPath, samplerate); // Uses tempPath as its intermediate file.
    // Create the OpenSL ES engine.
    slCreateEngine(&openSLEngine, 0, NULL, 0, NULL, NULL);
    (*openSLEngine)->Realize(openSLEngine, SL_BOOLEAN_FALSE);
    SLEngineItf openSLEngineInterface = NULL;
    (*openSLEngine)->GetInterface(openSLEngine, SL_IID_ENGINE, &openSLEngineInterface);
    // Create the output mix.
    (*openSLEngineInterface)->CreateOutputMix(openSLEngineInterface, &outputMix, 0, NULL, NULL);
    (*outputMix)->Realize(outputMix, SL_BOOLEAN_FALSE);
    SLDataLocator_OutputMix outputMixLocator = { SL_DATALOCATOR_OUTPUTMIX, outputMix };
    // Create the buffer queue player (stereo, 16-bit, little endian, device sample rate).
    SLDataLocator_AndroidSimpleBufferQueue bufferPlayerLocator = { SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, NUM_BUFFERS };
    SLDataFormat_PCM bufferPlayerFormat = { SL_DATAFORMAT_PCM, 2, samplerate * 1000, SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16, SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT, SL_BYTEORDER_LITTLEENDIAN };
    SLDataSource bufferPlayerSource = { &bufferPlayerLocator, &bufferPlayerFormat };
    const SLInterfaceID bufferPlayerInterfaces[1] = { SL_IID_BUFFERQUEUE };
    SLDataSink bufferPlayerOutput = { &outputMixLocator, NULL };
    (*openSLEngineInterface)->CreateAudioPlayer(openSLEngineInterface, &bufferPlayer, &bufferPlayerSource, &bufferPlayerOutput, 1, bufferPlayerInterfaces, requireds);
    (*bufferPlayer)->Realize(bufferPlayer, SL_BOOLEAN_FALSE);
    // Initialize and start the buffer queue: prime it with two silent buffers so the
    // callback chain starts ticking.
    (*bufferPlayer)->GetInterface(bufferPlayer, SL_IID_BUFFERQUEUE, &bufferQueue);
    (*bufferQueue)->RegisterCallback(bufferQueue, openSLESCallback, this);
    memset(outputBuffer[0], 0, buffersize * 4);
    memset(outputBuffer[1], 0, buffersize * 4);
    (*bufferQueue)->Enqueue(bufferQueue, outputBuffer[0], buffersize * 4);
    (*bufferQueue)->Enqueue(bufferQueue, outputBuffer[1], buffersize * 4);
    SLPlayItf bufferPlayerPlayInterface;
    (*bufferPlayer)->GetInterface(bufferPlayer, SL_IID_PLAY, &bufferPlayerPlayInterface);
    (*bufferPlayerPlayInterface)->SetPlayState(bufferPlayerPlayInterface, SL_PLAYSTATE_PLAYING);
}
// Releases everything allocated by the constructor.
SuperpoweredExample::~SuperpoweredExample() {
    for (int n = 0; n < NUM_BUFFERS; n++) free(outputBuffer[n]);
    delete playerA;
    delete playerB;
    // FIX: roll, filter, flanger and recorder were allocated in the constructor but
    // never freed here — memory (and recorder temp file) leak.
    delete roll;
    delete filter;
    delete flanger;
    delete recorder;
    delete mixer;
    pthread_mutex_destroy(&mutex);
}
// Toggles playback of both decks. When starting, the deck on the louder side of the
// crossfader becomes the sync master (play(false)); the other deck slaves to it.
void SuperpoweredExample::onPlayPause(bool play) {
    pthread_mutex_lock(&mutex);
    if (play) {
        bool masterIsA = (crossValue <= 0.5f);
        playerA->play(!masterIsA);
        playerB->play(masterIsA);
    } else {
        playerA->pause();
        playerB->pause();
    }
    pthread_mutex_unlock(&mutex);
}
// Starts (record == true) or stops (record == false) writing the mixed output,
// using destinationPath as the final file.
void SuperpoweredExample::onRecord(bool record) {
    pthread_mutex_lock(&mutex);
    if (record) recorder->start(destinationPath);
    else recorder->stop();
    pthread_mutex_unlock(&mutex);
}
// Maps the 0..100 crossfader position onto per-deck gains: hard left/right below 1%
// and above 99%, a constant power cosine curve in between. headroom is baked in.
void SuperpoweredExample::onCrossfader(int value) {
    pthread_mutex_lock(&mutex);
    crossValue = float(value) * 0.01f;
    float gainA, gainB;
    if (crossValue < 0.01f) {        // fully on deck A
        gainA = 1.0f * headroom;
        gainB = 0.0f;
    } else if (crossValue > 0.99f) { // fully on deck B
        gainA = 0.0f;
        gainB = 1.0f * headroom;
    } else {                         // constant power curve
        gainA = cosf(M_PI_2 * crossValue) * headroom;
        gainB = cosf(M_PI_2 * (1.0f - crossValue)) * headroom;
    }
    volA = gainA;
    volB = gainB;
    pthread_mutex_unlock(&mutex);
}
// Remembers which effect the fx fader controls (1: filter, 2: roll, anything else: flanger).
void SuperpoweredExample::onFxSelect(int value) {
    activeFx = value;
    __android_log_print(ANDROID_LOG_VERBOSE, "SuperpoweredExample", "FXSEL %i", value);
}
// Disables every effect in the chain (called when the fx fader is released).
void SuperpoweredExample::onFxOff() {
    roll->enable(false);
    filter->enable(false);
    flanger->enable(false);
}
#define MINFREQ 60.0f
#define MAXFREQ 20000.0f
// Maps a 0..1 fader value onto a 60 Hz .. 20 kHz filter frequency. The fader value is
// warped around 0.4 to spend more resolution in the musically useful mid range, then
// raised onto a logarithmic frequency scale. Values near the ends snap to the limits.
static inline float floatToFrequency(float value) {
    if (value > 0.97f) return MAXFREQ;
    if (value < 0.03f) return MINFREQ;
    float warped = value + ((0.4f - fabsf(value - 0.4f)) * 0.3f);
    float frequency = powf(10.0f, warped * log10f(MAXFREQ - MINFREQ)) + MINFREQ;
    if (frequency >= MAXFREQ) return MAXFREQ;
    return frequency;
}
// Applies the fx fader position (0..100) to the currently selected effect and
// disables the other two, so exactly one effect is active at a time.
void SuperpoweredExample::onFxValue(int ivalue) {
    float value = float(ivalue) * 0.01f;
    if (activeFx == 1) {
        // Resonant lowpass: fader up sweeps the cutoff down from MAXFREQ.
        filter->setResonantParameters(floatToFrequency(1.0f - value), 0.2f);
        filter->enable(true);
        flanger->enable(false);
        roll->enable(false);
    } else if (activeFx == 2) {
        // Beat roll: fader position selects the loop length in beats.
        if (value > 0.8f) roll->beats = 0.0625f;
        else if (value > 0.6f) roll->beats = 0.125f;
        else if (value > 0.4f) roll->beats = 0.25f;
        else if (value > 0.2f) roll->beats = 0.5f;
        else roll->beats = 1.0f;
        roll->enable(true);
        filter->enable(false);
        flanger->enable(false);
    } else {
        // Default effect: flanger, with the fader driving the wet amount.
        flanger->setWet(value);
        flanger->enable(true);
        filter->enable(false);
        roll->enable(false);
    }
}
// Renders one audio buffer. Runs on the OpenSL ES buffer queue thread; the mutex
// keeps UI-thread state changes (volumes, play state) out of the middle of a pass.
void SuperpoweredExample::process(SLAndroidSimpleBufferQueueItf caller) {
pthread_mutex_lock(&mutex);
float *stereoBuffer = outputBuffer[currentBuffer];
// The deck on the louder side of the crossfader dictates the master tempo.
bool masterIsA = (crossValue <= 0.5f);
float masterBpm = masterIsA ? playerA->currentBpm : playerB->currentBpm;
double msElapsedSinceLastBeatA = playerA->msElapsedSinceLastBeat; // When playerB needs it, playerA has already stepped this value, so save it now.
// Each player's process() reports whether it produced audio; track overall silence.
bool silence = !playerA->process(stereoBuffer, false, buffersize, volA, masterBpm, playerB->msElapsedSinceLastBeat);
if (playerB->process(stereoBuffer, !silence, buffersize, volB, masterBpm, msElapsedSinceLastBeatA)) silence = false;
roll->bpm = flanger->bpm = masterBpm; // Syncing fx is one line.
if (roll->process(silence ? NULL : stereoBuffer, stereoBuffer, buffersize) && silence) silence = false;
if (!silence) {
filter->process(stereoBuffer, stereoBuffer, buffersize);
flanger->process(stereoBuffer, stereoBuffer, buffersize);
// Feed the finished mix to the recorder. NOTE(review): the second argument (0/NULL)
// presumably marks the input as a single interleaved stereo buffer — confirm against
// the SuperpoweredRecorder.h docs. Also note this sits inside the !silence branch, so
// silent stretches are simply not written to the recording.
recorder->process(stereoBuffer, 0, buffersize);
};
pthread_mutex_unlock(&mutex);
// The stereoBuffer is ready now, let's put the finished audio into the requested buffers.
// buffersize * 4 bytes = buffersize frames of 16-bit stereo (2 channels * 2 bytes).
if (silence) memset(stereoBuffer, 0, buffersize * 4); else SuperpoweredStereoMixer::floatToShortInt(stereoBuffer, (short int *)stereoBuffer, buffersize);
(*caller)->Enqueue(caller, stereoBuffer, buffersize * 4);
if (currentBuffer < NUM_BUFFERS - 1) currentBuffer++; else currentBuffer = 0;
}
// JNI bridge declarations. The mangled names encode the Java package and class, so
// they must match the `native` method declarations in
// com.superpowered.crossexample.MainActivity exactly.
extern "C" {
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_SuperpoweredExample(JNIEnv *javaEnvironment, jobject self, jstring apkPath, jlongArray offsetAndLength);
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_onPlayPause(JNIEnv *javaEnvironment, jobject self, jboolean play);
// Record toggle added for this question.
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_onRecord(JNIEnv *javaEnvironment, jobject self, jboolean record);
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_onCrossfader(JNIEnv *javaEnvironment, jobject self, jint value);
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_onFxSelect(JNIEnv *javaEnvironment, jobject self, jint value);
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_onFxOff(JNIEnv *javaEnvironment, jobject self);
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_onFxValue(JNIEnv *javaEnvironment, jobject self, jint value);
}
static SuperpoweredExample *example = NULL;
// Android is not passing more than 2 custom parameters, so we had to pack file offsets and lengths into an array.
// Creates the native engine. apkPath: path to the APK; params: the packed
// [offsetA, lengthA, offsetB, lengthB, samplerate, buffersize] long array from Java.
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_SuperpoweredExample(JNIEnv *javaEnvironment, jobject self, jstring apkPath, jlongArray params) {
    // Convert the input jlong array to a regular int array.
    // FIX: the second argument of GetLongArrayElements/GetStringUTFChars is a
    // jboolean* isCopy out-parameter; passing JNI_FALSE (a jboolean value) only
    // worked because it happens to be 0. Pass NULL explicitly.
    jlong *longParams = javaEnvironment->GetLongArrayElements(params, NULL);
    int arr[6];
    for (int n = 0; n < 6; n++) arr[n] = longParams[n];
    javaEnvironment->ReleaseLongArrayElements(params, longParams, JNI_ABORT); // Read-only use, discard without copy-back.
    const char *path = javaEnvironment->GetStringUTFChars(apkPath, NULL);
    example = new SuperpoweredExample(path, arr);
    javaEnvironment->ReleaseStringUTFChars(apkPath, path);
}
// Thin JNI wrappers forwarding each UI event to the native instance.
// NOTE(review): `example` is NULL until the SuperpoweredExample() JNI call runs;
// Java must construct the native side before invoking any of these.
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_onPlayPause(JNIEnv *javaEnvironment, jobject self, jboolean play) {
example->onPlayPause(play);
}
// Record/stop toggle added for this question.
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_onRecord(JNIEnv *javaEnvironment, jobject self, jboolean record) {
example->onRecord(record);
}
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_onCrossfader(JNIEnv *javaEnvironment, jobject self, jint value) {
example->onCrossfader(value);
}
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_onFxSelect(JNIEnv *javaEnvironment, jobject self, jint value) {
example->onFxSelect(value);
}
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_onFxOff(JNIEnv *javaEnvironment, jobject self) {
example->onFxOff();
}
JNIEXPORT void Java_com_superpowered_crossexample_MainActivity_onFxValue(JNIEnv *javaEnvironment, jobject self, jint value) {
example->onFxValue(value);
}
MainActivity.java
package com.superpowered.crossexample;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.media.AudioManager;
import android.os.Build;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import java.io.IOException;
/**
 * CrossExample UI: a two-deck crossfading player with effects and an added
 * record button. All audio work happens in native code; this activity only
 * collects device audio parameters and forwards UI events over JNI.
 */
public class MainActivity extends ActionBarActivity {
    boolean playing = false;
    // True while the native recorder is writing the mix to disk.
    boolean recording = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Get the device's sample rate and buffer size to enable low-latency Android audio output, if available.
        String samplerateString = null, buffersizeString = null;
        if (Build.VERSION.SDK_INT >= 17) {
            AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
            samplerateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
            buffersizeString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
        }
        if (samplerateString == null) samplerateString = "44100";
        if (buffersizeString == null) buffersizeString = "512";
        // Files under res/raw are not compressed, just copied into the APK. Get the offset and length to know where our files are located.
        AssetFileDescriptor fd0 = getResources().openRawResourceFd(R.raw.lycka), fd1 = getResources().openRawResourceFd(R.raw.nuyorica);
        long[] params = {
                fd0.getStartOffset(),
                fd0.getLength(),
                fd1.getStartOffset(),
                fd1.getLength(),
                Integer.parseInt(samplerateString),
                Integer.parseInt(buffersizeString)
        };
        try {
            fd0.getParcelFileDescriptor().close();
            fd1.getParcelFileDescriptor().close();
        } catch (IOException e) {}
        // Arguments: path to the APK file, offset and length of the two resource files, sample rate, audio buffer size.
        SuperpoweredExample(getPackageResourcePath(), params);
        // Crossfader events drive the native deck volumes.
        final SeekBar crossfader = (SeekBar)findViewById(R.id.crossFader);
        crossfader.setOnSeekBarChangeListener(new OnSeekBarChangeListener() {
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                onCrossfader(progress);
            }
            public void onStartTrackingTouch(SeekBar seekBar) {}
            public void onStopTrackingTouch(SeekBar seekBar) {}
        });
        // Fx fader: the selected effect is active only while the fader is touched.
        final SeekBar fxfader = (SeekBar)findViewById(R.id.fxFader);
        fxfader.setOnSeekBarChangeListener(new OnSeekBarChangeListener() {
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                onFxValue(progress);
            }
            public void onStartTrackingTouch(SeekBar seekBar) {
                onFxValue(seekBar.getProgress());
            }
            public void onStopTrackingTouch(SeekBar seekBar) {
                onFxOff();
            }
        });
        // Fx select: the radio button index maps to the native effect id.
        final RadioGroup group = (RadioGroup)findViewById(R.id.radioGroup1);
        group.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
            public void onCheckedChanged(RadioGroup radioGroup, int checkedId) {
                RadioButton checkedRadioButton = (RadioButton)radioGroup.findViewById(checkedId);
                onFxSelect(radioGroup.indexOfChild(checkedRadioButton));
            }
        });
    }

    public void SuperpoweredExample_PlayPause(View button) { // Play/pause.
        playing = !playing;
        onPlayPause(playing);
        Button b = (Button) findViewById(R.id.playPause);
        b.setText(playing ? "Pause" : "Play");
    }

    // Record/stop toggle wired to the rec button in the layout.
    public void SuperpoweredExample_Record(View button) {
        recording = !recording;
        onRecord(recording);
        Button r = (Button) findViewById(R.id.rec);
        // FIX: the label was inverted — while recording the button must offer
        // "Stop Recording" (mirrors the play/pause pattern above).
        r.setText(recording ? "Stop Recording" : "Start Recording");
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    // Native bridge — implemented in SuperpoweredExample.cpp.
    private native void SuperpoweredExample(String apkPath, long[] offsetAndLength);
    private native void onPlayPause(boolean play);
    private native void onCrossfader(int value);
    private native void onFxSelect(int value);
    private native void onFxOff();
    private native void onFxValue(int value);
    // Record toggle added for this question.
    private native void onRecord(boolean record);

    static {
        System.loadLibrary("SuperpoweredExample");
    }
}
最佳答案
如果您遇到段错误（SIGSEGV），很可能是您的应用程序没有在清单中声明 WRITE_EXTERNAL_STORAGE 权限（CrossExample 本身没有声明该权限，因为它不写入任何内容）。如果问题仍然存在，则可能是目标目录不存在——例如 “/sdcard/不存在/不存在” 这样的路径。
关于android - Superpowered SDK : Implementing SuperpoweredRecorder. h 和设置录制路径,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/28989182/
我有一个Ruby程序,它使用rubyzip压缩XML文件的目录树。gem。我的问题是文件开始变得很重,我想提高压缩级别,因为压缩时间不是问题。我在rubyzipdocumentation中找不到一种为创建的ZIP文件指定压缩级别的方法。有人知道如何更改此设置吗?是否有另一个允许指定压缩级别的Ruby库? 最佳答案 这是我通过查看rubyzip内部创建的代码。level=Zlib::BEST_COMPRESSIONZip::ZipOutputStream.open(zip_file)do|zip|Dir.glob("**/*")d
我在使用omniauth/openid时遇到了一些麻烦。在尝试进行身份验证时,我在日志中发现了这一点:OpenID::FetchingError:Errorfetchinghttps://www.google.com/accounts/o8/.well-known/host-meta?hd=profiles.google.com%2Fmy_username:undefinedmethod`io'fornil:NilClass重要的是undefinedmethodio'fornil:NilClass来自openid/fetchers.rb,在下面的代码片段中:moduleNetclass
我正在查看instance_variable_set的文档并看到给出的示例代码是这样做的:obj.instance_variable_set(:@instnc_var,"valuefortheinstancevariable")然后允许您在类的任何实例方法中以@instnc_var的形式访问该变量。我想知道为什么在@instnc_var之前需要一个冒号:。冒号有什么作用? 最佳答案 我的第一直觉是告诉你不要使用instance_variable_set除非你真的知道你用它做什么。它本质上是一种元编程工具或绕过实例变量可见性的黑客攻击
我想设置一个默认日期,例如实际日期,我该如何设置?还有如何在组合框中设置默认值顺便问一下,date_field_tag和date_field之间有什么区别? 最佳答案 试试这个:将默认日期作为第二个参数传递。youcorrectlysetthedefaultvalueofcomboboxasshowninyourquestion. 关于ruby-on-rails-date_field_tag,如何设置默认日期?[rails上的ruby],我们在StackOverflow上找到一个类似的问
我正在玩HTML5视频并且在ERB中有以下片段:mp4视频从在我的开发环境中运行的服务器很好地流式传输到chrome。然而firefox显示带有海报图像的视频播放器,但带有一个大X。问题似乎是mongrel不确定ogv扩展的mime类型,并且只返回text/plain,如curl所示:$curl-Ihttp://0.0.0.0:3000/pr6.ogvHTTP/1.1200OKConnection:closeDate:Mon,19Apr201012:33:50GMTLast-Modified:Sun,18Apr201012:46:07GMTContent-Type:text/plain
我在Rails应用程序中使用CarrierWave/Fog将视频上传到AmazonS3。有没有办法判断上传的进度,让我可以显示上传进度如何? 最佳答案 CarrierWave和Fog本身没有这种功能;你需要一个前端uploader来显示进度。当我不得不解决这个问题时,我使用了jQueryfileupload因为我的堆栈中已经有jQuery。甚至还有apostonCarrierWaveintegration因此您只需按照那里的说明操作即可获得适用于您的应用的进度条。 关于ruby-on-r
相信很多人在录制视频的时候都会遇到各种各样的问题,比如录制的视频没有声音。屏幕录制为什么没声音?今天小编就和大家分享一下如何录制音画同步视频的具体操作方法。如果你有录制的视频没有声音,你可以试试这个方法。 一、检查是否打开电脑系统声音相信很多小伙伴在录制视频后会发现录制的视频没有声音,屏幕录制为什么没声音?如果当时没有打开音频录制,则录制好的视频是没有声音的。因此,建议在录制前进行检查。屏幕上没有声音,很可能是因为你的电脑系统的声音被禁止了。您只需打开电脑系统的声音,即可录制音频和图画同步视频。操作方法:步骤1:点击电脑屏幕右下侧的“小喇叭”图案,在上方的选项中,选择“声音”。 步骤2:在“声
如何使此根路径转到:“/dashboard”而不仅仅是http://example.com?root:to=>'dashboard#index',:constraints=>lambda{|req|!req.session[:user_id].blank?} 最佳答案 您可以通过以下方式实现:root:to=>redirect('/dashboard')match'/dashboard',:to=>"dashboard#index",:constraints=>lambda{|req|!req.session[:user_id].b
最近因为项目需要,需要将Android手机系统自带的某个系统软件反编译并更改里面某个资源,并重新打包,签名生成新的自定义的apk,下面我来介绍一下我的实现过程。APK修改,分为以下几步:反编译解包,修改,重打包,修改签名等步骤。安卓apk修改准备工作1.系统配置好JavaJDK环境变量2.需要root权限的手机(针对系统自带apk,其他软件免root)3.Auto-Sign签名工具4.apktool工具安卓apk修改开始反编译本文拿Android系统里面的Settings.apk做demo,具体如何将apk获取出来在此就不过多介绍了,直接进入主题:按键win+R输入cmd,打开命令窗口,并将路
我正在尝试为我的iOS应用程序设置cocoapods但是当我执行命令时:sudogemupdate--system我收到错误消息:当前已安装最新版本。中止。当我进入cocoapods的下一步时:sudogeminstallcocoapods我在MacOS10.8.5上遇到错误:ERROR:Errorinstallingcocoapods:cocoapods-trunkrequiresRubyversion>=2.0.0.我在MacOS10.9.4上尝试了同样的操作,但出现错误:ERROR:Couldnotfindavalidgem'cocoapods'(>=0),hereiswhy:U