# Class method excerpt: the enclosing module is assumed to `import threading`
# and `import time`, and to provide CircularProgress, LoadingScreen, and
# TRANSLATIONS from the application's UI / translation modules.
def classify_image(self, image_path):
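        """Classify an image, preferring the OpenAI vision API when online.

        A LoadingScreen is shown while classification runs. On API timeout,
        API error, or when offline, the method falls back to the local model
        via self.classify_with_local_model. Temporary files are always
        cleaned up in the finally block.
        """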
        try:
            print("\n=== Starting Image Classification ===")
            
            # Check internet connection
            has_internet = self.check_internet_connection()
            
            # Resolve the root window via any live CircularProgress widget
            # (assumes at least one instance exists), then read the UI language
            root = next(iter(CircularProgress._instances)).winfo_toplevel()
            language = getattr(root, 'LANGUAGE', 'EN')
            
            def attempt_openai_classification(loading_screen):
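                """Run the OpenAI vision request in a worker thread with a hard timeout.

                Returns (result, None) on success, or (None, reason) where reason is
                "timeout", "cancelled", or an error message string.
                """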
                try:
                    base64_image = self.encode_image(image_path)
                    prompt = self.bin_config.get_ai_prompt(language)

                    # Create Event for timeout handling and thread cancellation
                    response_received = threading.Event()
                    cancel_thread = threading.Event()
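                    # Shared dict used to pass the worker thread's result or error
                    # back to this thread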
                    api_response = {'result': None, 'error': None}

                    def api_call():
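                        """Worker-thread body: issue the request unless already cancelled."""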
                        try:
                            # Check if we're already cancelled
                            if cancel_thread.is_set():
                                return

                            response = self.client.chat.completions.create(
                                model="gpt-4-vision-preview",
                                messages=[
                                    {
                                        "role": "user",
                                        "content": [
                                            {"type": "text", "text": prompt},
                                            {
                                                "type": "image_url",
                                                "image_url": {
                                                    "url": f"data:image/jpeg;base64,{base64_image}"
                                                }
                                            }
                                        ]
                                    }
                                ],
                                max_tokens=300,
                                timeout=10  # 10 second timeout for the API call
                            )

                            # Check if we were cancelled while waiting for response
                            if cancel_thread.is_set():
                                return

                            api_response['result'] = response.choices[0].message.content.strip()
                            print('Completion Tokens:', response.usage.completion_tokens)
                            print('Prompt Tokens:', response.usage.prompt_tokens)
                            print('Total Tokens:', response.usage.total_tokens)
                            print(f"\nAI Response: {api_response['result']}")
                            response_received.set()
                        except Exception as e:
                            if not cancel_thread.is_set():  # Only set error if we weren't cancelled
                                api_response['error'] = str(e)
                                response_received.set()

                    # Start API call in separate thread
                    api_thread = threading.Thread(target=api_call)
                    api_thread.daemon = True
                    api_thread.start()

                    # Wait for response with timeout
                    if not response_received.wait(15):  # 15 second total timeout
                        print("\nAPI timeout occurred - switching to local model")
                        # Signal thread to cancel and ignore any late responses
                        cancel_thread.set()
                        loading_screen.update_message(
                            TRANSLATIONS[language].get('api_timeout', 'API taking too long. Switching to local model...')
                        )
                        time.sleep(2)  # Give user time to read the message
                        return None, "timeout"

                    # Check if we were cancelled
                    if cancel_thread.is_set():
                        return None, "cancelled"

                    if api_response['error']:
                        print(f"\nAPI error occurred: {api_response['error']}")
                        raise Exception(api_response['error'])

                    return api_response['result'], None

                except Exception as e:
                    print(f"\nError in OpenAI classification: {str(e)}")
                    return None, str(e)

            if has_internet and self.client:
                print("Using OpenAI API classification")
                
                # Create loading screen with initial message
                loading_screen = LoadingScreen(
                    root,
                    message=TRANSLATIONS[language]['classifying_image'],
                    dark_mode=getattr(root, 'DARK_MODE', False),
                    language=language
                )

                try:
                    # Attempt OpenAI classification
                    result, error = attempt_openai_classification(loading_screen)
                    
                    if result:
                        loading_screen.destroy()
                        print(f"\nClassification Result: {result}")
                        return result
                    else:
                        print("\nFalling back to local model")
                        loading_screen.update_message(
                            TRANSLATIONS[language].get('using_local_model', 'Using local model for classification...')
                        )
                        time.sleep(1)
                        result = self.classify_with_local_model(image_path)
                        loading_screen.destroy()
                        print(f"\nLocal Model Classification Result: {result}")
                        return result
                except Exception as e:
                    print(f"\nError during classification process: {str(e)}")
                    loading_screen.destroy()
                    raise
            else:
                print("Using local model classification")
                if not self.interpreter:
                    raise Exception("Local model not initialized")
                    
                loading_screen = LoadingScreen(
                    root,
                    message=TRANSLATIONS[language].get('using_local_model', 'Using local model for classification...'),
                    dark_mode=getattr(root, 'DARK_MODE', False),
                    language=language
                )
                try:
                    result = self.classify_with_local_model(image_path)
                    loading_screen.destroy()
                    print(f"\nLocal Model Classification Result: {result}")
                    return result
                except Exception as e:
                    print(f"\nError during local classification: {str(e)}")
                    loading_screen.destroy()
                    raise
                    
            print("=== Classification Complete ===\n")
                
        except Exception as e:
            print(f"\n!!! Error during classification: {str(e)}")
            raise
        finally:
            # Always runs: close out the log banner and remove temporary files
            print("=== Classification Complete ===\n")
            self.cleanup_temp_files()