This article collects typical usage examples of the C++ mydsp class. If you have been wondering how to use the mydsp class in C++, or what it looks like in practice, the curated class examples below should help.
The following 13 mydsp code examples are sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
Example 1: init

/*
 * init(samplingRate, bufferFrames)
 * Initializes the Audio engine and the DSP code
 * with samplingRate and bufferFrames.
 * This method also looks for the [style:poly]
 * metadata in the Faust code and initializes a
 * polyphonic object or not based on that. init
 * should be called before start.
 */
bool init(int samplingRate, int bufferSize) {
    DSP.init(samplingRate);
    inChanNumb = DSP.getNumInputs();
    outChanNumb = DSP.getNumOutputs();
    // configuring the UI
    DSP.buildUserInterface(&mapUI);
    DSP.buildUserInterface(&json);
    jsonString = json.JSON();
    if (jsonString.find("keyboard") != std::string::npos ||
        jsonString.find("poly") != std::string::npos) {
        polyMax = 4;
        DSPpoly = new mydsp_poly(polyMax, true);
        DSPpoly->init(samplingRate);
    } else {
        polyMax = 0;
    }
    return (fAudioDevice.Open(((polyMax > 0) ? DSPpoly : &DSP), inChanNumb, outChanNumb, bufferSize, samplingRate) == 0);
}

Developer ID: onukore, Project: radium, Lines of code: 31, Source file: ios-coreaudio-api.cpp
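The comment above says that init should be called before start. A minimal caller sketch under that reading, assuming this init and the start method it mentions are members of the dsp_faust wrapper shown in Example 11 (start is not shown on this page, so its name and signature are assumptions):

// Hedged usage sketch: dsp_faust is the wrapper from Example 11; start()
// is only mentioned in the comment above and is assumed here, not shown.
dsp_faust engine;               // the constructor builds the JSON UI (Example 11)
if (engine.init(44100, 512)) {  // 44.1 kHz sample rate, 512-frame buffers
    engine.start();             // assumed counterpart of init()
}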
Example 2: init

/*
 * init(samplingRate, bufferFrames)
 * Initializes the Audio engine and the DSP code
 * with samplingRate and bufferFrames.
 * This method also looks for the [style:poly]
 * metadata in the Faust code and initializes a
 * polyphonic object or not based on that. init
 * should be called before start.
 */
void init(int samplingRate, int bufferFrames) {
    // configuring global variables
    SR = samplingRate;
    bufferSize = bufferFrames;
    vecSamps = bufferSize;
    DSP.init(SR);
    inChanNumb = DSP.getNumInputs();
    outChanNumb = DSP.getNumOutputs();
    // configuring the UI
    DSP.buildUserInterface(&mapUI);
    DSP.buildUserInterface(&json);
    jsonString = json.JSON();
    if (jsonString.find("keyboard") != std::string::npos ||
        jsonString.find("poly") != std::string::npos) {
        polyMax = 4;
        polyCoef = 1.0f / polyMax;
        DSPpoly = new mydsp_poly(SR, bufferSize, polyMax);
    } else {
        polyMax = 0;
    }
    // allocating memory for the output channels
    bufferout = new float*[outChanNumb];
    for (int i = 0; i < outChanNumb; i++) {
        bufferout[i] = new float[vecSamps];
    }
    // allocating memory for the input channels
    if (inChanNumb >= 1) {
        bufferin = new float*[inChanNumb];
        for (int i = 0; i < inChanNumb; i++) {
            bufferin[i] = new float[vecSamps];
        }
    }
}

Developer ID: FlatIO, Project: faudiostream, Lines of code: 48, Source file: android.cpp
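The init above allocates bufferin and bufferout with new[], but the matching teardown is not part of this snippet. Below is a minimal cleanup sketch based only on the allocations shown; the destroy name is hypothetical and not taken from android.cpp:

// Hypothetical cleanup sketch: releases what init() above allocates.
// Only the buffer layout and the polyMax/DSPpoly logic come from the snippet.
void destroy() {
    for (int i = 0; i < outChanNumb; i++) {
        delete[] bufferout[i];
    }
    delete[] bufferout;
    if (inChanNumb >= 1) {
        for (int i = 0; i < inChanNumb; i++) {
            delete[] bufferin[i];
        }
        delete[] bufferin;
    }
    if (polyMax > 0) {
        delete DSPpoly;  // only allocated when the poly metadata was found
    }
}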
Example 3: json
//**************************************************************
// Native Faust API
//**************************************************************
#include <android/log.h>
#include "dsp_faust.h"
#include <stdio.h>
#include <string.h>
#define FAUSTFLOAT float
using namespace std;
OPENSL_STREAM *p; // the audio engine
mydsp DSP; // the monophonic Faust object
mydsp_poly *DSPpoly; // the polyphonic Faust object
MapUI mapUI; // the UI description
pthread_t audioThread; // native thread for audio
JSONUI json(DSP.getNumInputs(), DSP.getNumOutputs());
string jsonString;
// Global variables
int SR, bufferSize, vecSamps, polyMax, inChanNumb, outChanNumb, on;
float **bufferout, **bufferin, polyCoef;
/*
* init(samplingRate, bufferFrames)
* Initializes the Audio engine and the DSP code
* with samplingRate and bufferFrames.
* This method also looks for the [style:poly]
Developer ID: FlatIO, Project: faudiostream, Lines of code: 30, Source file: android.cpp
Example 4: main

//-------------------------------------------------------------------------
// MAIN
//-------------------------------------------------------------------------
int main(int argc, char *argv[])
{
    char appname[256];
    char rcfilename[256];
    char* home = getenv("HOME");
    snprintf(appname, 255, "%s", basename(argv[0]));
    snprintf(rcfilename, 255, "%s/.%src", home, appname);

    GUI* interface = new GTKUI(appname, &argc, &argv);
    FUI* finterface = new FUI();
    DSP.buildUserInterface(interface);
    DSP.buildUserInterface(finterface);
    DSP.buildUserInterface(new PrintUI());

#ifdef HTTPCTRL
    httpdUI* httpdinterface = new httpdUI(appname, argc, argv);
    DSP.buildUserInterface(httpdinterface);
    std::cout << "HTTPD is on" << std::endl;
#endif

#ifdef OSCCTRL
    GUI* oscinterface = new OSCUI(appname, argc, argv);
    DSP.buildUserInterface(oscinterface);
#endif

    jackaudio audio;
    audio.init(appname, &DSP);
    finterface->recallState(rcfilename);
    audio.start();

#ifdef HTTPCTRL
    httpdinterface->run();
#endif
#ifdef OSCCTRL
    oscinterface->run();
#endif
    interface->run();

    audio.stop();
    finterface->saveState(rcfilename);

    // deallocation
    delete interface;
    delete finterface;
#ifdef HTTPCTRL
    delete httpdinterface;
#endif
#ifdef OSCCTRL
    delete oscinterface;
#endif
    return 0;
}

Developer ID: antoniotuzzi, Project: faust, Lines of code: 58, Source file: jack-gtk.cpp
Example 5: main

/******************************************************************************
*******************************************************************************
                              MAIN PLAY THREAD
*******************************************************************************
*******************************************************************************/
int main(int argc, char *argv[])
{
    char name[256], dst[258];
    char rcfilename[256];
    char* home = getenv("HOME");
    snprintf(name, 255, "%s", basename(argv[0]));
    snprintf(dst, 257, "/%s/", name);
    snprintf(rcfilename, 255, "%s/.%src", home, name);

    QApplication myApp(argc, argv);
    QTGUI* interface = new QTGUI();
    FUI* finterface = new FUI();
    DSP.buildUserInterface(interface);
    DSP.buildUserInterface(finterface);

    oscdsp osca(dst, argc, argv);
    OSCUI* oscinterface = new OSCUI(name, argc, argv, &osca);
    DSP.buildUserInterface(oscinterface);
    snprintf(dst, 257, "/%s/", oscinterface->getRootName());
    osca.setDest(dst);

    osca.init(name, &DSP);
    finterface->recallState(rcfilename);
    osca.start();

    oscinterface->run();
    interface->run();
    myApp.setStyleSheet(interface->styleSheet());
    myApp.exec();
    interface->stop();

    osca.stop();
    finterface->saveState(rcfilename);

    // deallocation
    delete interface;
    delete finterface;
    delete oscinterface;
    return 0;
}

Developer ID: harryhaaren, Project: faust, Lines of code: 51, Source file: oscio-qt.cpp
Example 6: main

//-------------------------------------------------------------------------
// MAIN
//-------------------------------------------------------------------------
int main(int argc, char *argv[])
{
    char appname[256];
    char rcfilename[256];
    char* home = getenv("HOME");
    snprintf(appname, 255, "%s", basename(argv[0]));
    snprintf(rcfilename, 255, "%s/.%src", home, appname);

    CMDUI* interface = new CMDUI(argc, argv);
    FUI* finterface = new FUI();
    DSP.buildUserInterface(interface);
    DSP.buildUserInterface(finterface);

#ifdef OSCCTRL
    GUI* oscinterface = new OSCUI(appname, argc, argv);
    DSP.buildUserInterface(oscinterface);
#endif

#ifdef HTTPCTRL
    httpdUI* httpdinterface = new httpdUI(appname, argc, argv);
    DSP.buildUserInterface(httpdinterface);
#endif

    jackaudio audio;
    audio.init(appname, &DSP);
    interface->process_command();
    audio.start();

#ifdef HTTPCTRL
    httpdinterface->run();
#endif
#ifdef OSCCTRL
    oscinterface->run();
#endif
    interface->run();

    audio.stop();
    finterface->saveState(rcfilename);
    return 0;
}

Developer ID: tsob, Project: fcpp2appls, Lines of code: 45, Source file: jack-console.cpp
Example 7: main

int main(int argc, char *argv[])
{
    char appname[256];
    char rcfilename[256];
    char* home = getenv("HOME");
    snprintf(appname, 255, "%s", basename(argv[0]));
    snprintf(rcfilename, 255, "%s/.%src", home, appname);

    GUI* interface = new QTGUI(argc, argv);
    FUI* finterface = new FUI();
    DSP.buildUserInterface(interface);
    DSP.buildUserInterface(finterface);

#ifdef OSCCTRL
    GUI* oscinterface = new OSCUI(appname, argc, argv);
    DSP.buildUserInterface(oscinterface);
#endif

    long srate = (long)lopt(argv, "--frequency", 44100);
    int fpb = lopt(argv, "--buffer", 128);
    portaudio audio(srate, fpb);
    audio.init(appname, &DSP);
    finterface->recallState(rcfilename);
    audio.start();

#ifdef OSCCTRL
    oscinterface->run();
#endif
    interface->run();

    audio.stop();
    finterface->saveState(rcfilename);
    return 0;
}

Developer ID: tsob, Project: fcpp2appls, Lines of code: 36, Source file: pa-qt.cpp
Example 8:

/*
 * processDSP(threadID)
 * Compute the DSP frames of the Faust object.
 */
void *processDSP(void *threadID) {
    while (on) {
        // getting input signal
        if (inChanNumb >= 1)
            android_AudioIn(p, bufferin[0], vecSamps);
        // computing...
        if (polyMax == 0)
            DSP.compute(vecSamps, bufferin, bufferout);
        else
            DSPpoly->compute(vecSamps, bufferin, bufferout);
        // sending output signal
        android_AudioOut(p, bufferout, vecSamps);
    }
    return NULL; // a pthread routine must return a void*
}

Developer ID: FlatIO, Project: faudiostream, Lines of code: 20, Source file: android.cpp
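Example 3 declares the pthread_t audioThread and the on flag that this loop relies on. A minimal launch sketch, assuming a start entry point (the name is hypothetical) that raises the flag and spawns processDSP on that thread:

// Hypothetical launch sketch: runs processDSP() above on the audioThread
// declared in Example 3; the start() name is an assumption.
#include <pthread.h>

bool start() {
    on = 1;  // processDSP() keeps computing while 'on' is non-zero
    return pthread_create(&audioThread, NULL, processDSP, NULL) == 0;
}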
Example 9: main

int main(int argc, char *argv[])
{
    float fnbsamples;

    CMDUI* interface = new CMDUI(argc, argv);
    DSP.buildUserInterface(interface);
    interface->addOption("-n", &fnbsamples, 16, 0.0, 100000000.0);

    if (DSP.getNumInputs() > 0) {
        fprintf(stderr, "no inputs allowed\n");
        exit(1);
    }

    // init signal processor and the user interface values
    DSP.init(44100);

    // modify the UI values according to the command line options
    interface->process_command();

    int nouts = DSP.getNumOutputs();
    channels chan(kFrames, nouts);
    int nbsamples = int(fnbsamples);

    while (nbsamples > kFrames) {
        DSP.compute(kFrames, 0, chan.buffers());
        for (int i = 0; i < kFrames; i++) {
            for (int c = 0; c < nouts; c++) {
                printf("%8f\t", chan.buffers()[c][i]);
            }
            cout << endl;
        }
        nbsamples -= kFrames;
    }

    DSP.compute(nbsamples, 0, chan.buffers());
    for (int i = 0; i < nbsamples; i++) {
        for (int c = 0; c < nouts; c++) {
            printf("%8f\t", chan.buffers()[c][i]);
        }
        cout << endl;
    }
    return 0;
}

Developer ID: antoniotuzzi, Project: faust, Lines of code: 43, Source file: plot.cpp
Example 10: main

int main(int argc, char *argv[])
{
    SNDFILE* in_sf;
    SNDFILE* out_sf;
    SF_INFO in_info;
    SF_INFO out_info;
    unsigned int nAppend = 0; // number of frames to append beyond input file

    if (argc < 3) {
        fprintf(stderr, "*** USAGE: %s input_soundfile output_soundfile\n", argv[0]);
        exit(1);
    }

    nAppend = loptrm(&argc, argv, "--continue", "-c", 0);

    CMDUI* interface = new CMDUI(argc, argv);
    DSP.buildUserInterface(interface);
    interface->process_command();

    // open input file
    in_info.format = 0;
    in_sf = sf_open(interface->input_file(), SFM_READ, &in_info);
    if (in_sf == NULL) {
        fprintf(stderr, "*** Input file not found.\n");
        sf_perror(in_sf);
        exit(1);
    }

    // open output file
    out_info = in_info;
    out_info.format = in_info.format;
    out_info.channels = DSP.getNumOutputs();
    out_sf = sf_open(interface->output_file(), SFM_WRITE, &out_info);
    if (out_sf == NULL) {
        fprintf(stderr, "*** Cannot write output file.\n");
        sf_perror(out_sf);
        exit(1);
    }

    // create separator and interleaver
    Separator sep(kFrames, in_info.channels, DSP.getNumInputs());
    Interleaver ilv(kFrames, DSP.getNumOutputs());

    // init signal processor
    DSP.init(in_info.samplerate);
    //DSP.buildUserInterface(interface);
    interface->process_init();

    // process all samples
    int nbf;
    do {
        nbf = READ_SAMPLE(in_sf, sep.input(), kFrames);
        sep.separate();
        DSP.compute(nbf, sep.outputs(), ilv.inputs());
        ilv.interleave();
        sf_writef_float(out_sf, ilv.output(), nbf);
        //sf_write_raw(out_sf, ilv.output(), nbf);
    } while (nbf == kFrames);
    sf_close(in_sf);

    // compute tail, if any
    if (nAppend > 0) {
        FAUSTFLOAT* input = (FAUSTFLOAT*)calloc(nAppend * DSP.getNumInputs(), sizeof(FAUSTFLOAT));
        FAUSTFLOAT* inputs[1] = { input };
        Interleaver ailv(nAppend, DSP.getNumOutputs());
        DSP.compute(nAppend, inputs, ailv.inputs());
        ailv.interleave();
        sf_writef_float(out_sf, ailv.output(), nAppend);
    }
    sf_close(out_sf);
    return 0;
}

Developer ID: stuartdocherty, Project: faudiostream, Lines of code: 73, Source file: sndfile.cpp
Example 11: json

dsp_faust() : json(DSP.getNumInputs(), DSP.getNumOutputs()), DSPpoly(0), on(false) {}

Developer ID: onukore, Project: radium, Lines of code: 1, Source file: ios-coreaudio-api.cpp
Example 12: main

//-------------------------------------------------------------------------
// MAIN
//-------------------------------------------------------------------------
int main(int argc, char *argv[])
{
    char appname[256];
    char rcfilename[256];
    char* home = getenv("HOME");
    int celt = lopt(argv, "--celt", -1);
    const char* master_ip = lopts(argv, "--a", DEFAULT_MULTICAST_IP);
    int master_port = lopt(argv, "--p", DEFAULT_PORT);
    int mtu = lopt(argv, "--m", DEFAULT_MTU);
    int latency = lopt(argv, "--l", 2);
    snprintf(appname, 255, "%s", basename(argv[0]));
    snprintf(rcfilename, 255, "%s/.%src", home, appname);

    CMDUI* interface = new CMDUI(argc, argv);
    FUI* finterface = new FUI();
    DSP.buildUserInterface(interface);
    DSP.buildUserInterface(finterface);

#ifdef OSCCTRL
    GUI* oscinterface = new OSCUI(appname, argc, argv);
    DSP.buildUserInterface(oscinterface);
#endif

#ifdef HTTPCTRL
    httpdUI* httpdinterface = new httpdUI(appname, argc, argv);
    DSP.buildUserInterface(httpdinterface);
#endif

    netjackaudio audio(celt, master_ip, master_port, mtu, latency);
    if (!audio.init(appname, &DSP)) {
        return 0;
    }
    finterface->recallState(rcfilename);
    if (!audio.start()) {
        return 0;
    }

#ifdef HTTPCTRL
    httpdinterface->run();
#endif
#ifdef OSCCTRL
    oscinterface->run();
#endif
    interface->run();

    audio.stop();
    finterface->saveState(rcfilename);

    // deallocation
    delete interface;
    delete finterface;
#ifdef HTTPCTRL
    delete httpdinterface;
#endif
#ifdef OSCCTRL
    delete oscinterface;
#endif
    return 0;
}

Developer ID: antoniotuzzi, Project: faust, Lines of code: 66, Source file: netjack-console.cpp
Example 13: main

int main(int argc, char *argv[])
{
    char name[256];
    char rcfilename[256];
    char* home = getenv("HOME");
    snprintf(name, 255, "%s", basename(argv[0]));
    snprintf(rcfilename, 255, "%s/.%src", home, basename(argv[0]));

    long srate = (long)lopt(argv, "--frequency", -1);
    int fpb = lopt(argv, "--buffer", 512);

    QApplication myApp(argc, argv);
    QTGUI* interface = new QTGUI();
    DSP.buildUserInterface(interface);
    FUI* finterface = new FUI();
    DSP.buildUserInterface(finterface);

#ifdef HTTPCTRL
    httpdUI* httpdinterface = new httpdUI(name, argc, argv);
    DSP.buildUserInterface(httpdinterface);
#endif

#ifdef OSCCTRL
    GUI* oscinterface = new OSCUI(name, argc, argv);
    DSP.buildUserInterface(oscinterface);
#endif

    coreaudio audio(srate, fpb);
    audio.init(name, &DSP);
    finterface->recallState(rcfilename);
    audio.start();

#ifdef HTTPCTRL
    httpdinterface->run();
#ifdef QRCODECTRL
    interface->displayQRCode(httpdinterface->getTCPPort());
#endif
#endif

#ifdef OSCCTRL
    oscinterface->run();
#endif
    interface->run();

    myApp.setStyleSheet(STYLESHEET);
    myApp.exec();
    interface->stop();

    audio.stop();
    finterface->saveState(rcfilename);

    // deallocation
    delete interface;
    delete finterface;
#ifdef HTTPCTRL
    delete httpdinterface;
#endif
#ifdef OSCCTRL
    delete oscinterface;
#endif
    return 0;
}

Developer ID: antoniotuzzi, Project: faust, Lines of code: 65, Source file: ca-qt.cpp
Note: the mydsp class examples in this article were compiled by 纯净天空 from GitHub, MSDocs and other source-code and documentation platforms. The snippets were selected from open-source projects contributed by many developers; copyright remains with the original authors. Please consult each project's license before redistributing or reusing the code, and do not repost without permission.