From 57fc86394f650e1a1d531d5329ef412fba9a7591 Mon Sep 17 00:00:00 2001
From: Wannaphong Phatthiyaphaibun
Date: Tue, 18 Jul 2023 09:21:57 -0700
Subject: [PATCH 1/2] Add pythainlp.el

---
 docs/api/el.rst             |   8 ++
 docs/notes/installation.rst |   1 +
 notebooks/test_el.ipynb     | 144 ++++++++++++++++++++++++++++++++++++
 pythainlp/el/__init__.py    |  21 ++++++
 pythainlp/el/_multiel.py    |  33 +++++++++
 pythainlp/el/core.py        |  57 ++++++++++++++
 setup.py                    |   3 +
 7 files changed, 267 insertions(+)
 create mode 100644 docs/api/el.rst
 create mode 100644 notebooks/test_el.ipynb
 create mode 100644 pythainlp/el/__init__.py
 create mode 100644 pythainlp/el/_multiel.py
 create mode 100644 pythainlp/el/core.py

diff --git a/docs/api/el.rst b/docs/api/el.rst
new file mode 100644
index 000000000..bd88abc15
--- /dev/null
+++ b/docs/api/el.rst
@@ -0,0 +1,8 @@
+.. currentmodule:: pythainlp.el
+
+pythainlp.el
+============
+The :mod:`pythainlp.el` module provides Thai entity linking for PyThaiNLP.
+
+.. autoclass:: EntityLinker
+    :members:
diff --git a/docs/notes/installation.rst b/docs/notes/installation.rst
index b8d596482..81cdd5051 100644
--- a/docs/notes/installation.rst
+++ b/docs/notes/installation.rst
@@ -38,6 +38,7 @@ where ``extras`` can be
   - ``coreference_resolution`` (to support coreference esolution with all engine)
   - ``wangchanglm`` (to support wangchanglm model)
   - ``wsd`` (to support pythainlp.wsd)
+  - ``el`` (to support pythainlp.el)
   - ``full`` (install everything)
 
 For dependency details, look at `extras` variable in `setup.py `_.
diff --git a/notebooks/test_el.ipynb b/notebooks/test_el.ipynb
new file mode 100644
index 000000000..da6060583
--- /dev/null
+++ b/notebooks/test_el.ipynb
@@ -0,0 +1,144 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "c09e8c4b-9cb7-485b-a4f9-c04693a36e06",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"1\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "062b3aa5-b3ff-4f1f-b126-3036dbea821d",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "from pythainlp.el import EntityLinker"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "aa91fee8-5574-424b-9058-7312f39a2abe",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/usr/local/lib/python3.8/dist-packages/hydra/experimental/initialize.py:80: UserWarning: hydra.experimental.initialize_config_module() is no longer experimental. Use hydra.initialize_config_module().\n",
+      " deprecation_warning(message=message)\n",
+      "/usr/local/lib/python3.8/dist-packages/hydra/experimental/initialize.py:82: UserWarning: \n",
+      "The version_base parameter is not specified.\n",
+      "Please specify a compatability version level, or None.\n",
+      "Will assume defaults for version 1.1\n",
+      " self.delegate = real_initialize_config_module(\n",
+      "/usr/local/lib/python3.8/dist-packages/hydra/experimental/compose.py:25: UserWarning: hydra.experimental.compose() is no longer experimental. Use hydra.compose()\n",
+      " deprecation_warning(message=message)\n",
+      "/usr/local/lib/python3.8/dist-packages/hydra/_internal/defaults_list.py:251: UserWarning: In 'joint_el_mel_new': Defaults list is missing `_self_`. 
See https://hydra.cc/docs/1.2/upgrades/1.0_to_1.1/default_composition_order for more information\n",
+      " warnings.warn(msg, UserWarning)\n",
+      "/usr/local/lib/python3.8/dist-packages/hydra/core/default_element.py:124: UserWarning: In 'trainer/gpu_1_host': Usage of deprecated keyword in package header '# @package _group_'.\n",
+      "See https://hydra.cc/docs/1.2/upgrades/1.0_to_1.1/changes_to_package_header for more information\n",
+      " deprecation_warning(\n",
+      "/usr/local/lib/python3.8/dist-packages/hydra/core/default_element.py:124: UserWarning: In 'task/optim/adamw': Usage of deprecated keyword in package header '# @package _group_'.\n",
+      "See https://hydra.cc/docs/1.2/upgrades/1.0_to_1.1/changes_to_package_header for more information\n",
+      " deprecation_warning(\n",
+      "/usr/local/lib/python3.8/dist-packages/hydra/core/default_element.py:124: UserWarning: In 'task/model/xlmr_large': Usage of deprecated keyword in package header '# @package _group_'.\n",
+      "See https://hydra.cc/docs/1.2/upgrades/1.0_to_1.1/changes_to_package_header for more information\n",
+      " deprecation_warning(\n",
+      "/usr/local/lib/python3.8/dist-packages/hydra/core/default_element.py:124: UserWarning: In 'task/transform/joint_el_xlmr_raw_transform_large': Usage of deprecated keyword in package header '# @package _group_'.\n",
+      "See https://hydra.cc/docs/1.2/upgrades/1.0_to_1.1/changes_to_package_header for more information\n",
+      " deprecation_warning(\n",
+      "/usr/local/lib/python3.8/dist-packages/hydra/core/default_element.py:124: UserWarning: In 'datamodule/joint_el_datamodule': Usage of deprecated keyword in package header '# @package _group_'.\n",
+      "See https://hydra.cc/docs/1.2/upgrades/1.0_to_1.1/changes_to_package_header for more information\n",
+      " deprecation_warning(\n",
+      "/usr/local/lib/python3.8/dist-packages/hydra/core/default_element.py:124: UserWarning: In 'checkpoint_callback/default': Usage of deprecated keyword in package header '# @package _group_'.\n",
+      "See https://hydra.cc/docs/1.2/upgrades/1.0_to_1.1/changes_to_package_header for more information\n",
+      " deprecation_warning(\n",
+      "/usr/local/lib/python3.8/dist-packages/hydra/core/default_element.py:124: UserWarning: In 'task/joint_el_task': Usage of deprecated keyword in package header '# @package _group_'.\n",
+      "See https://hydra.cc/docs/1.2/upgrades/1.0_to_1.1/changes_to_package_header for more information\n",
+      " deprecation_warning(\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Setting ds_accelerator to cuda (auto detect)\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Some weights of the model checkpoint at xlm-roberta-large were not used when initializing XLMRobertaModel: ['lm_head.bias', 'lm_head.layer_norm.weight', 'lm_head.dense.weight', 'lm_head.layer_norm.bias', 'lm_head.dense.bias']\n",
+      "- This IS expected if you are initializing XLMRobertaModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
+      "- This IS NOT expected if you are initializing XLMRobertaModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n",
+      "/usr/local/lib/python3.8/dist-packages/faiss/contrib/torch_utils.py:51: UserWarning: TypedStorage is deprecated. It will be removed in the future and UntypedStorage will be the only storage class. 
This should only matter to you if you are using storages directly. To access UntypedStorage directly, use tensor.untyped_storage() instead of tensor.storage()\n",
+      " x.storage().data_ptr() + x.storage_offset() * 4)\n"
+     ]
+    }
+   ],
+   "source": [
+    "el = EntityLinker(device=\"cuda\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "9cd853bc-dea8-4579-affe-6db85596fcae",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[{'offsets': [11, 23],\n",
+       " 'lengths': [6, 7],\n",
+       " 'entities': ['Q484876', 'Q312'],\n",
+       " 'md_scores': [0.30301809310913086, 0.6399497389793396],\n",
+       " 'el_scores': [0.7142490744590759, 0.8657019734382629]}]"
+      ]
+     },
+     "execution_count": 4,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "el.get_el(\"จ๊อบเคยเป็นซีอีโอบริษัทแอปเปิล\")"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.8.10"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/pythainlp/el/__init__.py b/pythainlp/el/__init__.py
new file mode 100644
index 000000000..2a0705402
--- /dev/null
+++ b/pythainlp/el/__init__.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016-2023 PyThaiNLP Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+pythainlp.el
+"""
+
+__all__ = ["EntityLinker"]
+
+from pythainlp.el.core import EntityLinker
diff --git a/pythainlp/el/_multiel.py b/pythainlp/el/_multiel.py
new file mode 100644
index 000000000..216f9f6b9
--- /dev/null
+++ b/pythainlp/el/_multiel.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016-2023 PyThaiNLP Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class MultiEL:
+    def __init__(self, model_name="bela", device="cuda"):
+        self.model_name = model_name
+        self.device = device
+        self.load_model()
+    def load_model(self):
+        try:
+            from multiel import BELA
+        except ImportError:
+            raise ImportError(
+                "Cannot import the multiel package. Install it with: pip install multiel"
+            )
+        self._bela_run = BELA(device=self.device)
+    def process_batch(self, list_text):
+        if isinstance(list_text, str):
+            list_text = [list_text]
+        return self._bela_run.process_batch(list_text)
\ No newline at end of file
diff --git a/pythainlp/el/core.py b/pythainlp/el/core.py
new file mode 100644
index 000000000..e96f3f5a9
--- /dev/null
+++ b/pythainlp/el/core.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2016-2023 PyThaiNLP Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import List, Union
+
+
+class EntityLinker:
+    def __init__(self, model_name: str = "bela", device: str = "cuda", tag: str = "wikidata"):
+        """
+        Thai entity linker.
+
+        :param str model_name: model name (bela)
+        :param str device: device to run the model on (e.g. "cuda", "cpu")
+        :param str tag: knowledge base used for linking (wikidata)
+        """
+        self.model_name = model_name
+        self.device = device
+        self.tag = tag
+        if self.model_name not in ["bela"]:
+            raise NotImplementedError(f"EntityLinker doesn't support the {model_name} model.")
+        if self.tag not in ["wikidata"]:
+            raise NotImplementedError(f"EntityLinker doesn't support the {tag} tag.")
+        from pythainlp.el._multiel import MultiEL
+        self.model = MultiEL(model_name=self.model_name, device=self.device)
+    def get_el(self, list_text: Union[List[str], str]) -> Union[List[dict], str]:
+        """
+        Get entity linking results for Thai text.
+
+        :param Union[List[str], str] list_text: Thai text or list of Thai texts
+        :return: entity linking results, one dict per input text
+        :rtype: Union[List[dict], str]
+
+        :Example:
+        ::
+
+            from pythainlp.el import EntityLinker
+
+            el = EntityLinker(device="cuda")
+            print(el.get_el("จ๊อบเคยเป็นซีอีโอบริษัทแอปเปิล"))
+            # output: [{'offsets': [11, 23],
+            # 'lengths': [6, 7],
+            # 'entities': ['Q484876', 'Q312'],
+            # 'md_scores': [0.30301809310913086, 0.6399497389793396],
+            # 'el_scores': [0.7142490744590759, 0.8657019734382629]}]
+        """
+        return self.model.process_batch(list_text)
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 2b750c0a9..0a777fdc7 100644
--- a/setup.py
+++ b/setup.py
@@ -125,6 +125,9 @@
     "wsd":{
         "sentence-transformers>=2.2.2"
     },
+    "el":{
+        "multiel>=0.5"
+    },
     "full": [
         "PyYAML>=5.3.1",
         "attacut>=1.0.4",

From 9133b5f4b2f6736349411eef43e7170ee7797c05 Mon Sep 17 00:00:00 2001
From: Wannaphong Phatthiyaphaibun
Date: Wed, 19 Jul 2023 12:52:08 +0700
Subject: [PATCH 2/2] Create test_el.py

---
 tests/test_el.py | 11 +++++++++++
 1 file changed, 11 insertions(+)
 create mode 100644 tests/test_el.py

diff --git a/tests/test_el.py b/tests/test_el.py
new file mode 100644
index 000000000..88168c0ca
--- /dev/null
+++ b/tests/test_el.py
@@ -0,0 +1,11 @@
+# -*- coding: utf-8 -*-
+import unittest
+from pythainlp.el import EntityLinker
+
+
+class TestElPackage(unittest.TestCase):
+    def test_EntityLinker(self):
+        with self.assertRaises(NotImplementedError):
+            EntityLinker(model_name="cat")
+        with self.assertRaises(NotImplementedError):
+            EntityLinker(tag="cat")
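
Note for reviewers: the following is a minimal usage sketch, not part of the patch. It assumes the new extra installs cleanly with "pip install pythainlp[el]" (multiel>=0.5 brings in the BELA model and its PyTorch stack), that a CUDA device is available as in the notebook above, and that the offsets and lengths returned by get_el() are character indices into the input string, which is what the sample output suggests.

    # Hypothetical usage sketch for the new pythainlp.el API (not part of the patch).
    from pythainlp.el import EntityLinker

    # Defaults follow core.py: model_name="bela", tag="wikidata".
    el = EntityLinker(device="cuda")

    text = "จ๊อบเคยเป็นซีอีโอบริษัทแอปเปิล"
    # get_el() accepts a single string or a list of strings; a single string is
    # wrapped into a one-element batch by MultiEL.process_batch().
    results = el.get_el(text)

    for result in results:
        for offset, length, qid, md_score, el_score in zip(
            result["offsets"],
            result["lengths"],
            result["entities"],
            result["md_scores"],
            result["el_scores"],
        ):
            # Assumption: offsets/lengths are character spans in the input text,
            # so slicing recovers the mention surface form.
            mention = text[offset:offset + length]
            print(
                f"{mention} -> https://www.wikidata.org/wiki/{qid} "
                f"(mention score {md_score:.2f}, linking score {el_score:.2f})"
            )

If those assumptions hold, the example should print "ซีอีโอ" linked to Q484876 (chief executive officer) and "แอปเปิล" linked to Q312 (Apple Inc.), matching the notebook output above.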
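
A second, smaller sketch: EntityLinker defaults to device="cuda", so a CPU-only machine has to pass a different device explicitly. The guard below assumes the BELA backend can also run on CPU (the patch does not state this) and that torch is importable, which the multiel/BELA stack already requires.

    # Hypothetical device-selection guard; CPU support is an assumption, not
    # something the patch guarantees.
    import torch

    from pythainlp.el import EntityLinker

    device = "cuda" if torch.cuda.is_available() else "cpu"
    el = EntityLinker(device=device)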