Remove the code within '#ifdef deleted' statements.

Commit 186916afb6 on branch master, by Reece H. Dunn
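All of the removed hunks follow the same pattern: blocks wrapped in '#ifdef deleted' ... '#endif'. A minimal sketch of that pattern (illustrative only, not taken from the eSpeak sources): because no build ever defines a macro named 'deleted', the preprocessor always discards the guarded block, so deleting the block does not change the compiled program.

/* Illustrative sketch only, not from the eSpeak sources: the 'deleted' macro
   is never defined, so the preprocessor discards the guarded block and the
   program behaves exactly as it does after this commit removes the block. */
#include <stdio.h>

static void example(void)
{
#ifdef deleted
    printf("never compiled: 'deleted' is not defined by any build\n");
#endif
    printf("live code\n");
}

int main(void)
{
    example();   /* prints only "live code" */
    return 0;
}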

src/compiledata.cpp (+0, -16)

total = 0;
for(frame=0; frame < spectseq->numframes; frame++)
{

#ifdef deleted
for(ix=0; ix<8; ix++)
{
// find which files have certain markers set
if(spectseq->frames[frame]->markers & (1<<ix))
{
markers_used[ix]++;
if((ix==3) || (ix==4))
{
fprintf(f_errors,"Marker %d: %s\n",ix,path);
}
}
}
#endif

if(spectseq->frames[frame]->keyframe)
{
if(seq_out.n_frames == 1)

src/extras.cpp (+1, -139)

//******************************************************************************************************




#ifdef deleted
static int RuLex_sorter(char **a, char **b)
{//=======================================
char *pa, *pb;
int xa, xb;
int ix;

pa = *a;
pb = *b;

xa = strlen(pa)-1;
xb = strlen(pb)-1;

while((xa >= 0) && (xb >= 0))
{
if((ix = (pa[xa] - pb[xb])) != 0)
return(ix);

xa--;
xb--;
}
return(pa - pb);
} /* end of strcmp2 */
#endif


static const unsigned short KOI8_R[0x60] = {
0x2550, 0x2551, 0x2552, 0x0451, 0x2553, 0x2554, 0x2555, 0x2556, // a0
0x2557, 0x2558, 0x2559, 0x255a, 0x255b, 0x255c, 0x255d, 0x255e, // a8
if((c2 = pronounce[ix+1]) == 'i')
{
defer_stress =1;
#ifdef deleted
if(stress == 4)
{
*p++ = 'i';
c =':';
}
else
#endif
{
c = 'I';
}
c = 'I';
ix++;
}
}
int syllables;
} SUFFIX;


#ifdef deleted
FILE *f_roots;
int sfx;
const char *suffix;
int wlen;
int len;
static SUFFIX suffixes[] = {
{NULL,0},
{"ичу",2},
{"ского",2},
{"ская",2},
{"ски",1},
{"ские",2},
{"ский",1},
{"ским",1},
{"ское",2},
{"ской",1},
{"ском",1},
{"скую",2},

{"а",1},
{"е",1},
{"и",1},

{NULL,0}};
#endif

memset(counts,0,sizeof(counts));


if(gui_flag)
//CharStats();
}
}


#ifdef deleted
if(check_root)
{
// does this word match any suffixes ?
wlen = strlen(word);
for(sfx=0;(suffix = suffixes[sfx].suffix) != NULL; sfx++)
{
len = strlen(suffix);
if(len >= (wlen-2))
continue;

if(ru_stress > (vcount - suffixes[sfx].syllables))
continue;

if(strcmp(suffix,&word[wlen-len])==0)
{
strcpy(word2,word);
word2[wlen-len] = 0;
// fprintf(f_roots,"%s\t $%d\t\\ %s\n",word2,ru_stress,suffix);
fprintf(f_roots,"%s\t $%d\n",word2,ru_stress);
}
}
}
#endif
}


fclose(f_in);


if(f_log != NULL)
{

#ifdef deleted
// list tables of frequency of stress position for words of different syllable lengths
int j,k;
for(ix=2; ix<12; ix++)
{
fprintf(f_log,"%2d syllables\n",ix);
for(k=0; k<10; k++)
{
fprintf(f_log," %2d :",k);
for(j=1; j<=ix; j++)
{
fprintf(f_log,"%6d ",counts[ix][j][k]);
}
fprintf(f_log,"\n");
}
fprintf(f_log,"\n\n");
}
#endif
fclose(f_log);
}








#ifdef deleted
void Test2()
{
//
char buf[120];
FILE *f;
FILE *f_out;
unsigned char *p;

f = fopen("/home/jsd1/tmp1/list","r");
if(f == NULL) return;
f_out = fopen("/home/jsd1/tmp1/list_out","w");
if(f_out == NULL) return;

while(!feof(f))
{
if(fgets(buf,sizeof(buf),f) == NULL)
break;

p = (unsigned char *)buf;
while(*p > ' ') p++;
*p = 0;
fprintf(f_out,"%s . . .\n",buf);
}
fclose(f);
fclose(f_out);
}

#endif

#define MAX_WALPHA 0x24f
void Make_walpha_tab()
{//===================

src/libespeak-ng/compiledict.c (+0, -18)

p = linebuf;
// while(isspace2(*p)) p++;


#ifdef deleted
if(*p == '$')
{
if(memcmp(p,"$textmode",9) == 0)
{
text_mode = 1;
return(0);
}
if(memcmp(p,"$phonememode",12) == 0)
{
text_mode = 0;
return(0);
}
}
#endif

step = 0; step = 0;


c = 0; c = 0;
if(c == '\\')
{
c = *p++; // treat next character literally
//#ifdef deleted
if((c >= '0') && (c <= '3') && (p[0] >= '0') && (p[0] <= '7') && (p[1] >= '0') && (p[1] <= '7'))
{
// character code given by 3 digit octal value;
c = (c-'0')*64 + (p[0]-'0')*8 + (p[1]-'0');
p += 2;
}
//endif
literal = 1;
}
if(hexdigit_input)

src/libespeak-ng/dictionary.c (+0, -24)

ph = phoneme_tab[p[0]];
}


#ifdef deleted
int gap = tr->langopts.word_gap & 0x700;
if((gap) && (vowel_stress[1] >= 4) && (prev_stress >= 4))
{
/* two primary stresses together, insert a short pause */
*output++ = pause_phonemes[gap >> 8];
}
else
#endif
if((tr->langopts.vowel_pause & 0x30) && (ph->type == phVOWEL))
{
// word starts with a vowel
return(0);
}
}
#ifdef deleted
// can't switch to a tone language, because the tone-phoneme numbers are not valid for the original language
if((letter >= 0x4e00) && (letter < 0xa000) && (tr->langopts.ideographs != 1))
{
// Chinese ideogram
sprintf(phonemes,"%czh",phonSWITCH);
return(0);
}
#endif


// is it a bracket ?
if(letter == 0xe000+'(')
if(word_flags & FLAG_UNPRON_TEST)
return(match1.end_type | 1);


#ifdef deleted
// ?? allow $unpr while translating rules, not just on initial FLAG_UNPRON_TEST
if((match1.end_type & SUFX_UNPRON) && !(word_flags & FLAG_SUFFIX_REMOVED))
return(match1.end_type);
#endif

if((match1.phonemes[0] == phonSWITCH) && ((word_flags & FLAG_DONT_SWITCH_TRANSLATOR)==0))
{
// an instruction to switch language, return immediately so we can re-translate

src/libespeak-ng/intonation.c (+0, -7)

else
calc_pitches(option, st_start, ix, group_tone);


#ifdef deleted
if((ix < n_st) || (clause_type == 0))
calc_pitches(option, st_start, ix, group_tone_emph); // split into > 1 tone groups, use emphatic tone
else
calc_pitches(option, st_start, ix, group_tone);
#endif

st_start = ix;
}
if((st_start < st_ix) && (syl->flags & SYL_END_CLAUSE))

src/libespeak-ng/klatt.c (+1, -62)







/*
function ANTIRESONATOR

This is a generic anti-resonator function. The code is the same as resonator
except that a,b,c need to be set with setzeroabc() and we save inputs in
p1/p2 rather than outputs. There is currently only one of these - "rnz"
Output = (rnz.a * input) + (rnz.b * oldin1) + (rnz.c * oldin2)
*/
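As a side note, here is a minimal standalone sketch (an assumed, simplified form, not code from this commit) of the two filter variants the comment above contrasts: both evaluate the same difference equation out = a*in + b*s1 + c*s2, and the only difference is whether the state variables remember previous outputs (resonator) or previous inputs (anti-resonator).

/* Illustrative sketch, assumed simplified form; not part of this diff. */
typedef struct { double a, b, c, s1, s2; } filt_t;

static double resonator_step(filt_t *f, double input)
{
    double out = f->a * input + f->b * f->s1 + f->c * f->s2;
    f->s2 = f->s1;
    f->s1 = out;      /* resonator: feed back previous outputs */
    return out;
}

static double antiresonator_step(filt_t *f, double input)
{
    double out = f->a * input + f->b * f->s1 + f->c * f->s2;
    f->s2 = f->s1;
    f->s1 = input;    /* anti-resonator: remember previous inputs */
    return out;
}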

#ifdef deleted
static double antiresonator(resonator_ptr r, double input)
{
register double x = (double)r->a * (double)input + (double)r->b * (double)r->p1 + (double)r->c * (double)r->p2;
r->p2 = (double)r->p1;
r->p1 = (double)input;
return (double)x;
}
#endif

static double antiresonator2(resonator_ptr r, double input)
{
register double x = (double)r->a * (double)input + (double)r->b * (double)r->p1 + (double)r->c * (double)r->p2;


out = outbypas - out;


#ifdef deleted
// for testing
if (kt_globals.outsl != 0)
{
switch(kt_globals.outsl)
{
case 1:
out = voice;
break;
case 2:
out = aspiration;
break;
case 3:
out = frics;
break;
case 4:
out = glotout;
break;
case 5:
out = par_glotout;
break;
case 6:
out = outbypas;
break;
case 7:
out = sourc;
break;
}
}
#endif

out = resonator(&(kt_globals.rsn[Rout]),out);
temp = (int)(out * wdata.amplitude * kt_globals.amp_gain0) ; /* Convert back to integer */




if(end_wave > 0)
{
#ifdef deleted
if(end_wave == 2)
{
fade = (kt_globals.T0 - kt_globals.nper)/4; // samples until end of current cycle
if(fade < 64)
fade = 64;
}
else
#endif
{
fade = 64; // not followd by formant synthesis
}
fade = 64; // not followd by formant synthesis


// fade out to avoid a click
kt_globals.fadeout = fade;

src/libespeak-ng/setlengths.c (+0, -14)

#endif // of INCLUDE_SONIC




#ifdef deleted
void SetAmplitude(int amp)
{//=======================
static unsigned char amplitude_factor[] = {0,5,6,7,9,11,14,17,21,26, 32, 38,44,50,56,63,70,77,84,91,100 };

if((amp >= 0) && (amp <= 20))
{
option_amplitude = (amplitude_factor[amp] * 480)/256;
}
}
#endif



void SetParameter(int parameter, int value, int relative)
{//======================================================
// parameter: reset-all, amp, pitch, speed, linelength, expression, capitals, number grouping

src/libespeak-ng/speak_lib.c (+0, -40)

if((type == espeakEVENT_MARK) || (type == espeakEVENT_PLAY))
ep->id.name = &namedata[value];
else
//#ifdef deleted
// temporarily removed, don't introduce until after eSpeak version 1.46.02
if(type == espeakEVENT_PHONEME)
{
int *p;
p[1] = value2;
}
else
//#endif
{
ep->id.number = value;
}
{//============================================================
ENTER("espeak_SetVoiceByName");


//#ifdef USE_ASYNC
// I don't think there's a need to queue change voice requests
#ifdef deleted
espeak_ERROR a_error;

if(synchronous_mode)
{
return(SetVoiceByName(name));
}

t_espeak_command* c = create_espeak_voice_name(name);
a_error = fifo_add_command(c);
if (a_error != EE_OK)
{
delete_espeak_command(c);
}
return a_error;
#else
return(SetVoiceByName(name));
#endif
} // end of espeak_SetVoiceByName




{//==============================================================================
ENTER("espeak_SetVoiceByProperties");


//#ifdef USE_ASYNC
#ifdef deleted
espeak_ERROR a_error;

if(synchronous_mode)
{
return(SetVoiceByProperties(voice_selector));
}

t_espeak_command* c = create_espeak_voice_spec( voice_selector);
a_error = fifo_add_command(c);
if (a_error != EE_OK)
{
delete_espeak_command(c);
}
return a_error;
#else
return(SetVoiceByProperties(voice_selector));
#endif
} // end of espeak_SetVoiceByProperties





src/libespeak-ng/tr_languages.c (+0, -18)

}
break;


#ifdef deleted
case L('t','h'): // Thai
{
static const short stress_lengths_th[8] = {230,150, 230,230, 230,0, 230,250};
static const unsigned char stress_amps_th[] = {22,16, 22,22, 22,22, 22,22 };

SetupTranslator(tr,stress_lengths_th,stress_amps_th);

tr->langopts.stress_rule = 0; // stress on final syllable of a "word"
tr->langopts.stress_flags = S_NO_DIM; // don't automatically set diminished stress (may be set in the intonation module)
tr->langopts.tone_language = 1; // Tone language, use CalcPitches_Tone() rather than CalcPitches()
tr->langopts.length_mods0 = tr->langopts.length_mods; // don't lengthen vowels in the last syllable
// tr->langopts.tone_numbers = 1; // a number after letters indicates a tone number (eg. pinyin or jyutping)
tr->langopts.word_gap = 0x21; // length of a final vowel is less dependent on the next consonant, don't merge consonant with next word
}
break;
#endif

case L('t','r'): // Turkish
case L('a','z'): // Azerbaijan
{

src/libespeak-ng/translate.c (+0, -55)

return(0);
}


#ifdef deleted
p = &wordx[word_length-3]; // this looks wrong. Doesn't consider multi-byte chars.
if(memcmp(p,"'s ",3) == 0)
{
// remove a 's suffix and pronounce this separately (not as an individual letter)
add_plural_suffix = 1;
p[0] = ' ';
p[1] = ' ';
last_char = p[-1];
}
#endif
length=0;
while(wordx[length] != ' ') length++;
}
return(0);
}


#ifdef deleted
// ?? allow $unpr while translating rules, not just on initial FLAG_UNPRON_TEST
if(end_type & SUFX_UNPRON)
{
phonemes[0] = 0; // discard and retranslate as individual letters
SpeakIndividualLetters(tr, wordx, phonemes, 0);
strcpy(word_phonemes, phonemes);
return(0);
}
#endif

if((phonemes[0] == 0) && (end_phonemes[0] == 0))
{
int wc;
dictionary_flags[0] &= ~FLAG_PAUSE1;
}


#ifdef deleted
// but it causes problems if these are not a person name
if(tr->translator_name == L('h','u'))
{
// lang=hu, If the last two words of a clause have capital letters (eg. a person name), unstress the last word.
if((wflags & (FLAG_LAST_WORD | FLAG_FIRST_UPPER | FLAG_ALL_UPPER | FLAG_FIRST_WORD)) == (FLAG_LAST_WORD | FLAG_FIRST_UPPER))
{
if(((wtab[-1].flags & (FLAG_FIRST_UPPER | FLAG_ALL_UPPER)) == FLAG_FIRST_UPPER) && ((tr->clause_terminator != 0x90028) || (wflags & FLAG_HAS_DOT)))
{
ChangeWordStress(tr,word_phonemes,3);
}
}
}
#endif

if((wflags & FLAG_HYPHEN) && (tr->langopts.stress_flags & S_HYPEN_UNSTRESS))
{
ChangeWordStress(tr,word_phonemes,3);
{
// speak as words


#ifdef deleted
if((c == '/') && (tr->langopts.testing & 2) && IsDigit09(next_in) && IsAlpha(prev_out))
{
// TESTING, explicit indication of stressed syllable by /2 after the word
word_mark = next_in-'0';
source_index++;
c = ' ';
}
#endif
if((c == 0x92) || (c == 0xb4) || (c == 0x2019) || (c == 0x2032))
c = '\''; // 'microsoft' quote or sexed closing single quote, or prime - possibly used as apostrophe


}
}
else
#ifdef deleted
// Brackets are now recognised in TranslateRules()
if(IsBracket(c))
{
pre_pause_add = 4;
c = ' ';
}
else
#endif
if(lookupwchar(breaks,c) != 0)
{
c = ' '; // various characters to treat as space

src/libespeak-ng/wavegen.c (+0, -37)

static unsigned char *pk_shape;




static void WavegenInitPkData(int which)
{//=====================================
// this is only needed to set up the presets for pk_shape1 and pk_shape2
// These have already been pre-calculated and preset
#ifdef deleted
int ix;
int p;
float x;
float y[PEAKSHAPEW];
float maxy=0;

if(which==0)
pk_shape = pk_shape1;
else
pk_shape = pk_shape2;

p = 0;
for(ix=0;ix<PEAKSHAPEW;ix++)
{
x = (4.5*ix)/PEAKSHAPEW;
if(x >= pk_shape_x[which][p+3]) p++;
y[ix] = polint(&pk_shape_x[which][p],&pk_shape_y[which][p],3,x);
if(y[ix] > maxy) maxy = y[ix];
}
for(ix=0;ix<PEAKSHAPEW;ix++)
{
p = (int)(y[ix]*255/maxy);
pk_shape[ix] = (p >= 0) ? p : 0;
}
pk_shape[PEAKSHAPEW]=0;
#endif
} // end of WavegenInitPkData



#ifdef USE_PORTAUDIO
// PortAudio interface


}
}


WavegenInitPkData(1);
WavegenInitPkData(0);
pk_shape = pk_shape2; // pk_shape2


#ifdef INCLUDE_KLATT

src/prosodydisplay.cpp (+0, -15)

string = wxString(envelope_names[ix].mnem, wxConvLocal);
menu_envelopes->Append(0x100+envelope_names[ix].value, string);
}
#ifdef deleted
menu_envelopes->Append(0x100,_T("fall"));
menu_envelopes->Append(0x102,_T("rise"));
menu_envelopes->Append(0x104,_T("fall-rise"));
// menu_envelopes->Append(0x105,_T("fall-rise (R)"));
menu_envelopes->Append(0x106,_T("fall-rise 2"));
// menu_envelopes->Append(0x107,_T("fall-rise 2(R)"));
menu_envelopes->Append(0x108,_T("rise-fall"));

menu_envelopes->Append(0x10a,_T("fall-rise 3"));
menu_envelopes->Append(0x10c,_T("fall-rise 4"));
menu_envelopes->Append(0x10e,_T("fall 2"));
menu_envelopes->Append(0x110,_T("rise 2"));
menu_envelopes->Append(0x112,_T("rise-fall-rise"));
#endif
menu_prosody = new wxMenu;
menu_prosody->Append(1,_T("Pitch envelope"),menu_envelopes);
menu_prosody->Append(2,_T("Amplitude"));

src/spect.h (+0, -1)

private:
void RefreshFrame(int frame);
void SetKeyframe(SpectFrame *sf, int yes);
void PlayChild(int number, PitchEnvelope pitchenv);
void SetExtent();
int sframe;
int pk_num;

src/spectdisplay.cpp (+1, -23)








void SpectDisplay::PlayChild(int number, PitchEnvelope pitchenv)
{//=========================================================
#ifdef deleted
SpectSeq *seq;

if(number >= canvaslistix) return;

if((seq = canvaslist[number]->spectseq) == NULL)
return;

ReadDialogValues();
seq->MakeWave(0,seq->numframes-1,pitchenv);
#endif
} // end of PlayChild


void SpectDisplay::SetKeyframe(SpectFrame *sf, int yes)
{//====================================================
if(sf->keyframe == yes) return; // already done
if(key>='0' && key<='9')
{
i = key-'0';
if(event.ControlDown())
{
if(i==0) i=10;
PlayChild(i-1,voicedlg->pitchenv);
}
else
if(!event.ControlDown())
{
// select peak number
if((pk_num = i) >= N_PEAKS) pk_num = N_PEAKS-1;

src/transldlg.cpp (+0, -53)

wxTextAttr style_phonetic_large;




#ifdef deleted

class IPATextCtrl : public wxTextCtrl
{
public:
void OnKey(wxKeyEvent& event);
IPATextCtrl(wxWindow *parent,wxWindowID id,const wxPoint& pos,const wxSize& size);

DECLARE_EVENT_TABLE()
};

BEGIN_EVENT_TABLE(IPATextCtrl, wxTextCtrl)
EVT_CHAR(IPATextCtrl::OnKey)
END_EVENT_TABLE()


IPATextCtrl::IPATextCtrl(wxWindow *parent,wxWindowID id,const wxPoint& pos,const wxSize& size) :
wxTextCtrl(parent,id,_T(""),pos,size,wxTE_MULTILINE)
{
wxTextAttr attr;
wxFont font = wxFont(12,wxFONTFAMILY_ROMAN,wxFONTSTYLE_NORMAL,wxFONTWEIGHT_LIGHT);

attr.SetFont(font);
SetDefaultStyle(attr);
}

void IPATextCtrl::OnKey(wxKeyEvent& event)
{
long key;
wchar_t wbuf[3];

key = event.m_keyCode;
key = event.GetKeyCode();

if(event.ControlDown())
key = key & 0x1f;

if(key==0xd && !event.ControlDown())
event.Skip(); // ENTER
else
if(key != 8 && key < 0x7f)
{
wbuf[0] = ipa1[key];
wbuf[1] = 0;
WriteText(wxString(wbuf));
}
else
event.Skip();
}
#endif



void PlayWavFile(const char *fname)
{//================================
char command[120];

src/voicedlg.cpp (+0, -68)

ReadParams();
}


#ifdef deleted
void VoiceDlg::Save()
{//==================
int pk;
wxString filename;
FILE *f;
filename = wxFileSelector(_T("Save voice"),path_voices,_T(""),_T(""),_T("*"),wxSAVE);
if(filename.IsEmpty())
return;

f = fopen(filename.mb_str(wxConvLocal),"w");
if(f == NULL)
{
wxLogError(_T("Failed to open '%s'"),filename.c_str());
return;
}

wxFileName fn(filename);
path_voices = fn.GetPath();
ReadParams();
fprintf(f,"%3d %3d\n",vd_pitch1->GetValue(),vd_pitch2->GetValue());
for(pk=0; pk<N_PEAKS+1; pk++)
{
fprintf(f,"%3d %3d %3d\n",voice_pcnt[pk][0],voice_pcnt[pk][1],voice_pcnt[pk][2]);
}
fclose(f);
} // end of VoiceDlg::Save


void VoiceDlg::Load()
{//===============================
wxString filename;
int pk, j;
filename = wxFileSelector(_T("Load voice"),path_voices,_T(""),_T(""),_T("*"),wxOPEN);
if(filename.IsEmpty()) return;

wxFileInputStream stream(filename);

if(stream.Ok() == FALSE)
{
wxLogError(_T("Failed to open '%s'"),filename.c_str());
return;
}

wxFileName fn(filename);
path_voices = fn.GetPath();
wxTextInputStream s(stream);

s >> pitch1;
s >> pitch2;
for(pk=0; pk<N_PEAKS+1; pk++)
{
for(j=0; j<3; j++)
{
s >> voice_pcnt[pk][j];
SetSpinCtrl(vd[pk][j],voice_pcnt[pk][j]);
}
}

SetSpinCtrl(voicedlg->vd_pitch1,pitch1);
SetSpinCtrl(voicedlg->vd_pitch2,pitch2);
ReadParams();
} // end of VoiceDlg::Load
#endif


void VoiceDlg::SetFromSpect(SpectSeq* spect)
{//=========================================
