---------------------------------------------------------------------------
ModuleNotFoundError                       Traceback (most recent call last)
Input In [2], in <cell line: 3>()
      1 import spacy
      2 # Load the English pre-trained model with NER
----> 3 nlp = spacy.load('en_core_web_sm')

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/spacy/__init__.py:30, in load(name, **overrides)
     28 if depr_path not in (True, False, None):
     29     warnings.warn(Warnings.W001.format(path=depr_path), DeprecationWarning)
---> 30 return util.load_model(name, **overrides)

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/spacy/util.py:170, in load_model(name, **overrides)
    168     return load_model_from_link(name, **overrides)
    169 if is_package(name):  # installed as package
--> 170     return load_model_from_package(name, **overrides)
    171 if Path(name).exists():  # path to model data directory
    172     return load_model_from_path(Path(name), **overrides)

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/spacy/util.py:191, in load_model_from_package(name, **overrides)
    189 """Load a model from an installed package."""
    190 cls = importlib.import_module(name)
--> 191 return cls.load(**overrides)

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/en_core_web_sm/__init__.py:12, in load(**overrides)
     11 def load(**overrides):
---> 12     return load_model_from_init_py(__file__, **overrides)

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/spacy/util.py:239, in load_model_from_init_py(init_file, **overrides)
    237 if not model_path.exists():
    238     raise IOError(Errors.E052.format(path=path2str(data_path)))
--> 239 return load_model_from_path(data_path, meta, **overrides)

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/spacy/util.py:203, in load_model_from_path(model_path, meta, **overrides)
    201 lang = meta.get("lang_factory", meta["lang"])
    202 cls = get_lang_class(lang)
--> 203 nlp = cls(meta=meta, **overrides)
    204 pipeline = meta.get("pipeline", [])
    205 factories = meta.get("factories", {})

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/spacy/language.py:171, in Language.__init__(self, vocab, make_doc, max_length, meta, **kwargs)
    149 def __init__(
    150     self, vocab=True, make_doc=True, max_length=10 ** 6, meta={}, **kwargs
    151 ):
    152     """Initialise a Language object.
    153 
    154     vocab (Vocab): A `Vocab` object. If `True`, a vocab is created via
   (...)
    169     RETURNS (Language): The newly constructed object.
    170     """
--> 171     user_factories = util.registry.factories.get_all()
    172     self.factories.update(user_factories)
    173     self._meta = dict(meta)

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/catalogue/__init__.py:112, in Registry.get_all(self)
    110 result = OrderedDict()
    111 if self.entry_points:
--> 112     result.update(self.get_entry_points())
    113 for keys, value in REGISTRY.items():
    114     if len(self.namespace) == len(keys) - 1 and all(
    115         self.namespace[i] == keys[i] for i in range(len(self.namespace))
    116     ):

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/catalogue/__init__.py:127, in Registry.get_entry_points(self)
    125 result = {}
    126 for entry_point in self._get_entry_points():
--> 127     result[entry_point.name] = entry_point.load()
    128 return result

File /Analytics/python3/lib/python3.8/importlib/metadata.py:77, in EntryPoint.load(self)
     72 """Load the entry point from its definition. If only a module
     73 is indicated by the value, return that module. Otherwise,
     74 return the named object.
     75 """
     76 match = self.pattern.match(self.value)
---> 77 module = import_module(match.group('module'))
     78 attrs = filter(None, (match.group('attr') or '').split('.'))
     79 return functools.reduce(getattr, attrs, module)

File /Analytics/python3/lib/python3.8/importlib/__init__.py:127, in import_module(name, package)
    125             break
    126         level += 1
--> 127 return _bootstrap._gcd_import(name[level:], package, level)

File <frozen importlib._bootstrap>:1014, in _gcd_import(name, package, level)

File <frozen importlib._bootstrap>:991, in _find_and_load(name, import_)

File <frozen importlib._bootstrap>:975, in _find_and_load_unlocked(name, import_)

File <frozen importlib._bootstrap>:671, in _load_unlocked(spec)

File <frozen importlib._bootstrap_external>:848, in exec_module(self, module)

File <frozen importlib._bootstrap>:219, in _call_with_frames_removed(f, *args, **kwds)

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/spacy_transformers/__init__.py:1, in <module>
----> 1 from .language import TransformersLanguage
      2 from .pipeline.tok2vec import TransformersTok2Vec  # noqa
      3 from .pipeline.textcat import TransformersTextCategorizer  # noqa

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/spacy_transformers/language.py:5, in <module>
      3 from spacy.util import get_lang_class
      4 from spacy.gold import GoldParse
----> 5 from .util import is_special_token, pkg_meta, ATTRS, PIPES, LANG_FACTORY
      8 class TransformersLanguage(Language):
      9     """A subclass of spacy.Language that holds a Transformer pipeline.
     10 
     11     Transformer pipelines work only slightly differently from spaCy's default
   (...)
     15     and backprop it after the other components are done.
     16     """

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/spacy_transformers/util.py:6, in <module>
      3 import numpy
      4 from spacy.tokens import Doc, Span
----> 6 from . import _tokenizers
      8 try:
      9     # This allows us to use cupy with mypy, for type checking
     10     import cupy  # noqa

File /Analytics/venv/CAPEANALYTICS/lib/python3.8/site-packages/spacy_transformers/_tokenizers.py:20, in <module>
     17 import re
     19 import transformers
---> 20 from transformers.tokenization_gpt2 import bytes_to_unicode
     21 from transformers.tokenization_bert import BasicTokenizer, WordpieceTokenizer
     24 BASE_CLASS_FIELDS = [
     25     "_bos_token",
     26     "_eos_token",
   (...)
     37     "unique_added_tokens_encoder_list",
     38 ]

ModuleNotFoundError: No module named 'transformers.tokenization_gpt2'
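The final ModuleNotFoundError points to a likely version mismatch rather than a missing install: the spacy_transformers package in this venv (a release built for spaCy 2.x) imports transformers.tokenization_gpt2, a module path that existed in transformers 3.x but was moved under transformers.models.gpt2 in the 4.x reorganization. The sketch below is a minimal diagnostic, assuming transformers itself imports in this environment; the module paths checked are the old and new layouts, not anything specific to this venv.

# Hedged diagnostic sketch: see which transformers module layout is installed.
import importlib.util
import transformers

def has_module(name):
    """Return True if the dotted module path can be found, False otherwise."""
    try:
        return importlib.util.find_spec(name) is not None
    except ModuleNotFoundError:
        # A missing parent package (e.g. transformers.models on 3.x) raises here.
        return False

print("transformers version:", transformers.__version__)
print("old path transformers.tokenization_gpt2 present:",
      has_module("transformers.tokenization_gpt2"))
print("new path transformers.models.gpt2.tokenization_gpt2 present:",
      has_module("transformers.models.gpt2.tokenization_gpt2"))

If only the new path is present, the usual ways out are pinning transformers to a release that still ships the old path, or moving to spaCy 3 with a matching spacy-transformers; the exact compatible version pair depends on this environment and is not stated in the traceback.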