Cleanup and some minor changes

Gregory John Casamento 2020-02-06 02:35:53 -05:00
parent 92e00382c0
commit a9bc3fe5d9
2 changed files with 34 additions and 22 deletions


@@ -40,15 +40,17 @@ BOOL _serverLaunchTested = NO;
 #define SPEECH_RECOGNITION_SERVER @"GSSpeechRecognitionServer"
 @interface NSObject (GSSpeechRecognitionServer)
-- (NSSpeechRecognizer *)newRecognizer;
+- (NSSpeechRecognizer *) newRecognizer;
 @end
 @implementation NSSpeechRecognizer
 + (void) initialize
 {
-  _speechRecognitionServer = [[NSConnection rootProxyForConnectionWithRegisteredName: SPEECH_RECOGNITION_SERVER
-                                                                                host: nil] retain];
+  _speechRecognitionServer = [NSConnection
+    rootProxyForConnectionWithRegisteredName: SPEECH_RECOGNITION_SERVER
+    host: nil];
+  RETAIN(_speechRecognitionServer);
   if (nil == _speechRecognitionServer)
     {
       NSWorkspace *ws = [NSWorkspace sharedWorkspace];
@@ -56,6 +58,10 @@ BOOL _serverLaunchTested = NO;
                             showIcon: NO
                           autolaunch: NO];
     }
+  else
+    {
+      NSLog(@"Server found in +initialize");
+    }
 }
 - (void) processNotification: (NSNotification *)note
@@ -63,9 +69,11 @@ BOOL _serverLaunchTested = NO;
   NSString *word = (NSString *)[note object];
   NSEnumerator *en = [_commands objectEnumerator];
   id obj = nil;
+  word = [word lowercaseString];
   while ((obj = [en nextObject]) != nil)
     {
-      if ([[obj lowercaseString] isEqualToString: [word lowercaseString]])
+      if ([[obj lowercaseString] isEqualToString: word])
        {
          [_delegate speechRecognizer: self
                  didRecognizeCommand: word];
@@ -95,14 +103,17 @@ BOOL _serverLaunchTested = NO;
   if (nil == _speechRecognitionServer && !_serverLaunchTested)
     {
       unsigned int i = 0;
       // Wait for up to five seconds for the server to launch, then give up.
       for (i=0 ; i < 50 ; i++)
        {
-         _speechRecognitionServer = [[NSConnection rootProxyForConnectionWithRegisteredName: SPEECH_RECOGNITION_SERVER
-                                                                                       host: nil] retain];
+         _speechRecognitionServer = [NSConnection
+           rootProxyForConnectionWithRegisteredName: SPEECH_RECOGNITION_SERVER
+           host: nil];
+         RETAIN(_speechRecognitionServer);
          if (nil != _speechRecognitionServer)
            {
-             NSLog(@"No server!!!");
+             NSLog(@"Server found!!!");
              break;
            }
          [NSThread sleepForTimeInterval: 0.1];
@@ -112,9 +123,11 @@ BOOL _serverLaunchTested = NO;
       // launch the next time if it didn't work this time.
       _serverLaunchTested = YES;
     }
+  // If there is no server, this will return nil
   return [_speechRecognitionServer newRecognizer];
     }
   return [super allocWithZone: aZone];
 }
@@ -173,12 +186,17 @@ BOOL _serverLaunchTested = NO;
 // Listening
 - (void) startListening
 {
-  [_speechRecognitionServer startListening];
+  if (_speechRecognitionServer != nil)
+    {
+      [_speechRecognitionServer startListening];
+    }
 }
 - (void) stopListening
 {
-  [_speechRecognitionServer stopListening];
+  if (_speechRecognitionServer != nil)
+    {
+      [_speechRecognitionServer stopListening];
+    }
 }
 @end
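
The retry loop in +allocWithZone: above polls the Distributed Objects name
server for GSSpeechRecognitionServer every 0.1 seconds and gives up after 50
attempts (five seconds). A minimal standalone sketch of that pattern, assuming
GNUstep Foundation (the function name is illustrative, not part of the commit):

    #import <Foundation/Foundation.h>

    /* Poll for a DO server proxy, retrying every 0.1s for up to 5s. */
    static id lookUpServerWithRetry(NSString *name)
    {
      id proxy = nil;
      unsigned int i;

      for (i = 0; i < 50; i++)
        {
          proxy = [NSConnection rootProxyForConnectionWithRegisteredName: name
                                                                    host: nil];
          if (proxy != nil)
            {
              break;  /* server responded */
            }
          [NSThread sleepForTimeInterval: 0.1];  /* not up yet; retry */
        }
      return proxy;  /* nil if the server never registered */
    }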


@@ -54,10 +54,6 @@ static const arg_t cont_args_def[] = {
 @implementation PocketsphinxSpeechRecognitionEngine
-+ (void)initialize
-{
-}
 - (id)init
 {
   if ((self = [super init]) != nil)
@@ -93,7 +89,7 @@ static const arg_t cont_args_def[] = {
  * NOTE: This code is derived from continuous.c under pocketsphinx
  * which is MIT licensed
  * Main utterance processing loop:
- *     for (;;) {
+ *     while (YES) {
  *        start utterance and wait for speech to process
  *        decoding till end-of-utterance silence will be detected
  *        print utterance result;
@@ -157,7 +153,6 @@ static const arg_t cont_args_def[] = {
                              withObject: recognizedString
                           waitUntilDone: NO];
          NSDebugLog(@"RECOGNIZED WORD: %s", hyp);
-         fflush(stdout);
        }
       if (ps_start_utt(ps) < 0)
@@ -175,12 +170,11 @@ static const arg_t cont_args_def[] = {
 - (void) startListening
 {
-  /*
-  _listeningThread = [[NSThread alloc] initWithTarget: self
-                                             selector: @selector(recognize)
-                                               object: nil];
-  [_listeningThread start];*/
-  [self recognize];
+  _listeningThread =
+    [[NSThread alloc] initWithTarget: self
+                            selector: @selector(recognize)
+                              object: nil];
+  [_listeningThread start];
 }
 - (void) stopListening
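
The utterance-processing loop described in the comment in the second hunk
above (derived from continuous.c) can be sketched roughly as follows, assuming
the pocketsphinx 5prealpha API; `ps` and `ad` stand for an already-configured
decoder and audio device, and error handling is mostly elided:

    #import <Foundation/Foundation.h>
    #include <pocketsphinx.h>
    #include <sphinxbase/ad.h>

    /* Read audio until end-of-utterance silence, report the hypothesis,
     * then start the next utterance; loops until a call fails. */
    static void runDecodeLoop(ps_decoder_t *ps, ad_rec_t *ad)
    {
      int16 buf[2048];
      uint8 utt_started = FALSE, in_speech;

      if (ad_start_rec(ad) < 0 || ps_start_utt(ps) < 0)
        {
          return;
        }
      while (YES)
        {
          int32 k = ad_read(ad, buf, 2048);          /* pull raw samples */
          if (k < 0)
            {
              break;                                 /* device error */
            }
          ps_process_raw(ps, buf, k, FALSE, FALSE);  /* feed the decoder */
          in_speech = ps_get_in_speech(ps);
          if (in_speech && !utt_started)
            {
              utt_started = TRUE;                    /* utterance began */
            }
          if (!in_speech && utt_started)
            {
              /* Trailing silence: close the utterance, read the result. */
              const char *hyp = ps_get_hyp(ps, NULL);
              ps_end_utt(ps);
              if (hyp != NULL)
                {
                  NSLog(@"recognized: %s", hyp);
                }
              if (ps_start_utt(ps) < 0)
                {
                  break;
                }
              utt_started = FALSE;
            }
        }
    }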