30 changes: 16 additions & 14 deletions setup.py
@@ -69,20 +69,22 @@
'sinatools.CLI.DataDownload.get_appdatadir:main'),
('download_files='
'sinatools.CLI.DataDownload.download_files:main'),
-('corpus_entity_extractor='
-'sinatools.CLI.ner.corpus_entity_extractor:main'),
-('text_dublication_detector='
-'sinatools.CLI.utils.text_dublication_detector:main'),
-('evaluate_synonyms='
-'sinatools.CLI.synonyms.evaluate_synonyms:main'),
-('extend_synonyms='
-'sinatools.CLI.synonyms.extend_synonyms:main'),
-('semantic_relatedness='
-'sinatools.CLI.semantic_relatedness.compute_relatedness:main'),
-('relation_extractor='
-'sinatools.CLI.relations.relation_extractor:main'),
-],
-},
+('corpus_entity_extractor='
+'sinatools.CLI.ner.corpus_entity_extractor:main'),
+('text_dublication_detector='
+'sinatools.CLI.utils.text_dublication_detector:main'),
+('evaluate_synonyms='
+'sinatools.CLI.synonyms.evaluate_synonyms:main'),
+('extend_synonyms='
+'sinatools.CLI.synonyms.extend_synonyms:main'),
+('semantic_relatedness='
+'sinatools.CLI.semantic_relatedness.compute_relatedness:main'),
+('relation_extractor='
+'sinatools.CLI.relations.relation_extractor:main'),
+('convert_tag_vocab='
+'sinatools.CLI.ner.convert_tag_vocab:main'),
+],
+},
data_files=[('sinatools', ['sinatools/environment.yml'])],
package_data={'sinatools': ['data/*.pickle', 'environment.yml']},
install_requires=requirements,
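
Note on the hunk above: each string is a setuptools console-script declaration, so installing the package also installs a convert_tag_vocab executable that dispatches to sinatools.CLI.ner.convert_tag_vocab:main. The surrounding setup() call is not visible in this hunk; a minimal sketch, assuming the standard console_scripts layout and with the real metadata reduced to placeholders, looks like this:

# Minimal sketch only; the real setup.py carries the full SinaTools metadata and
# the complete command list shown in the diff above. The two-line strings in the
# diff use implicit concatenation and are equivalent to the single strings here.
from setuptools import setup, find_packages

setup(
    name="SinaTools",  # assumed project name
    packages=find_packages(),
    entry_points={
        "console_scripts": [
            # each "name=module:function" entry becomes an executable after installation
            "convert_tag_vocab=sinatools.CLI.ner.convert_tag_vocab:main",
        ],
    },
)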
74 changes: 74 additions & 0 deletions sinatools/CLI/ner/convert_tag_vocab.py
@@ -0,0 +1,74 @@
"""CLI helper to convert legacy torchtext tag vocabularies."""
from __future__ import annotations

import argparse
from pathlib import Path
from typing import Optional

from sinatools.ner.tag_vocab import convert_tag_vocab_file


def _build_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(
description="Convert a legacy tag_vocab.pkl to the native SinaTools format",
)
parser.add_argument(
"model_dir",
type=Path,
help="Directory that contains tag_vocab.pkl (e.g. models/sinatools/Wj27012000.tar)",
)
parser.add_argument(
"--backup-suffix",
default=".legacy",
help="Suffix used for the backup copy (default: .legacy)",
)
parser.add_argument(
"--no-backup",
action="store_true",
help="Skip creating a backup copy before rewriting tag_vocab.pkl",
)
parser.add_argument(
"--dry-run",
action="store_true",
help="Only report whether conversion is needed without writing files",
)
return parser


def main(argv: Optional[list[str]] = None) -> int:
parser = _build_parser()
args = parser.parse_args(argv)

model_dir: Path = args.model_dir
pickle_path = model_dir / "tag_vocab.pkl"

if not pickle_path.exists():
parser.error(f"No tag_vocab.pkl found at {pickle_path}")

backup_suffix = None if args.no_backup else args.backup_suffix

try:
converted, backup_path = convert_tag_vocab_file(
model_dir,
backup_suffix=backup_suffix,
dry_run=args.dry_run,
)
except Exception as exc: # pragma: no cover - CLI surface
parser.error(str(exc))

if args.dry_run:
if converted:
parser.exit(0, "Conversion required (dry-run).\n")
parser.exit(0, "No conversion required.\n")

if not converted:
parser.exit(0, "tag_vocab.pkl already uses the native format.\n")

message = "Converted tag_vocab.pkl" if backup_suffix is None else (
f"Converted tag_vocab.pkl (backup saved to {backup_path})"
)
parser.exit(0, message + "\n")


if __name__ == "__main__": # pragma: no cover - CLI entry point
raise SystemExit(main())
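
A brief usage note for the new command: main() accepts an explicit argv list and otherwise reads sys.argv, and it always terminates through argparse, so a SystemExit with code 0 signals success and code 2 signals a usage error. A small sketch, assuming the model directory from the --help example text exists locally:

# Usage sketch; the model directory path is illustrative (taken from the help text)
# and main() exits via argparse, so SystemExit is the expected control flow.
from sinatools.CLI.ner.convert_tag_vocab import main

try:
    # Report whether conversion is needed without touching any files.
    main(["models/sinatools/Wj27012000.tar", "--dry-run"])
except SystemExit as exc:
    print(f"dry-run exit code: {exc.code}")

try:
    # Convert in place, keeping a backup with the default .legacy suffix.
    main(["models/sinatools/Wj27012000.tar"])
except SystemExit as exc:
    print(f"conversion exit code: {exc.code}")

Once installed via the setup.py entry point above, the same two invocations are available from the shell as convert_tag_vocab models/sinatools/Wj27012000.tar --dry-run and convert_tag_vocab models/sinatools/Wj27012000.tar.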
8 changes: 2 additions & 6 deletions sinatools/ner/__init__.py
@@ -1,10 +1,8 @@
from sinatools.DataDownload import downloader
import os
from sinatools.ner.helpers import load_object
-import pickle
-import os
+from sinatools.ner.tag_vocab import load_tag_vocab
import torch
-import pickle
import json
from argparse import Namespace

@@ -17,9 +15,7 @@
model_path = os.path.join(path, filename)

_path = os.path.join(model_path, "tag_vocab.pkl")

-with open(_path, "rb") as fh:
-    tag_vocab = pickle.load(fh)
+tag_vocab = load_tag_vocab(_path)

train_config = Namespace()
args_path = os.path.join(model_path, "args.json")
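
The sinatools.ner.tag_vocab module that load_tag_vocab and convert_tag_vocab_file come from is not part of the hunks shown here. A rough sketch of one plausible implementation, consistent with how the two functions are called above and in the CLI, and assuming the native format is simply a pickled list of tag strings while legacy torchtext vocabularies expose get_itos() or itos (both assumptions, not the confirmed SinaTools format):

# Hypothetical sketch of sinatools/ner/tag_vocab.py; not taken from this PR.
import pickle
import shutil
from pathlib import Path
from typing import Optional, Tuple


def _to_tag_list(obj):
    """Normalize a loaded tag-vocab object; returns (tags, was_legacy)."""
    if isinstance(obj, list):
        return obj, False
    if hasattr(obj, "get_itos"):   # newer torchtext Vocab API
        return list(obj.get_itos()), True
    if hasattr(obj, "itos"):       # older torchtext Vocab attribute
        return list(obj.itos), True
    raise TypeError(f"Unsupported tag vocab object: {type(obj)!r}")


def load_tag_vocab(path):
    """Load tag_vocab.pkl and return the tags in the native (list) form."""
    with open(path, "rb") as fh:
        obj = pickle.load(fh)
    tags, _ = _to_tag_list(obj)
    return tags


def convert_tag_vocab_file(model_dir, backup_suffix: Optional[str] = ".legacy",
                           dry_run: bool = False) -> Tuple[bool, Optional[Path]]:
    """Rewrite tag_vocab.pkl in place; returns (converted, backup_path)."""
    pickle_path = Path(model_dir) / "tag_vocab.pkl"
    with open(pickle_path, "rb") as fh:
        obj = pickle.load(fh)
    tags, was_legacy = _to_tag_list(obj)
    if not was_legacy or dry_run:
        return was_legacy, None
    backup_path = None
    if backup_suffix is not None:
        backup_path = pickle_path.with_name(pickle_path.name + backup_suffix)
        shutil.copy2(pickle_path, backup_path)
    with open(pickle_path, "wb") as fh:
        pickle.dump(tags, fh)
    return True, backup_path

Note that reading a legacy pickle still requires the library that produced it (for example torchtext) to be importable, which is a good reason to convert each model directory once with the new CLI and ship the plain format afterwards.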
7 changes: 3 additions & 4 deletions sinatools/ner/helpers.py
@@ -4,11 +4,11 @@
import importlib
import shutil
import torch
-import pickle
import json
import random
import numpy as np
from argparse import Namespace
+from sinatools.ner.tag_vocab import load_tag_vocab


def logging_config(log_file=None):
@@ -70,8 +70,7 @@ def load_checkpoint(model_path):
vocab - arabicner.utils.data.Vocab - indexed tags
train_config - argparse.Namespace - training configurations
"""
-with open(os.path.join(model_path, "tag_vocab.pkl"), "rb") as fh:
-    tag_vocab = pickle.load(fh)
+tag_vocab = load_tag_vocab(os.path.join(model_path, "tag_vocab.pkl"))

# Load train configurations from checkpoint
train_config = Namespace()
@@ -114,4 +113,4 @@ def set_seed(seed):

torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
-torch.backends.cudnn.enabled = False
\ No newline at end of file
+torch.backends.cudnn.enabled = False