Merged
2 changes: 0 additions & 2 deletions src/ocrd/cli/network.py
@@ -12,7 +12,6 @@
client_cli,
processing_server_cli,
processing_worker_cli,
processor_server_cli,
)


@@ -27,4 +26,3 @@ def network_cli():
network_cli.add_command(client_cli)
network_cli.add_command(processing_server_cli)
network_cli.add_command(processing_worker_cli)
network_cli.add_command(processor_server_cli)
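After this change the `ocrd network` group registers only three subcommands. A quick sanity check could look like the sketch below; the expected command names are my reading of the remaining `*_cli` imports, not captured output:

from ocrd.cli.network import network_cli

# click groups keep their registered subcommands in the .commands mapping
print(sorted(network_cli.commands))
# expected roughly: ['client', 'processing-server', 'processing-worker']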
68 changes: 19 additions & 49 deletions src/ocrd/decorators/__init__.py
@@ -41,7 +41,6 @@ def ocrd_cli_wrap_processor(
list_resources=False,
# ocrd_network params start #
subcommand=None,
address=None,
queue=None,
log_filename=None,
database=None,
@@ -88,9 +87,8 @@
if list_resources:
processor.list_resources()
sys.exit()
if subcommand or address or queue or database:
# Used for checking/starting network agents for the WebAPI architecture
check_and_run_network_agent(processorClass, subcommand, address, database, queue)
if subcommand == "worker" or queue or database:
check_and_run_processing_worker(processorClass, database, queue)

if 'parameter' in kwargs:
# Disambiguate parameter file/literal, and resolve file
@@ -160,54 +158,26 @@ def goexit():
run_processor(processorClass, mets_url=mets, workspace=workspace, **kwargs)


def check_and_run_network_agent(ProcessorClass, subcommand: str, address: str, database: str, queue: str):
def check_and_run_processing_worker(ProcessorClass, database: str, queue: str):
""" Check/start Processing Worker for the WebAPI architecture
"""
"""
from ocrd_network import ProcessingWorker, ProcessorServer, AgentType
SUBCOMMANDS = [AgentType.PROCESSING_WORKER, AgentType.PROCESSOR_SERVER]

if not subcommand:
raise ValueError("Subcommand options --address --queue and --database "
f"are only valid for subcommands: {SUBCOMMANDS}")
if subcommand not in SUBCOMMANDS:
raise ValueError(f"SUBCOMMAND can only be one of {SUBCOMMANDS}")
from ocrd_network import ProcessingWorker

if not database:
raise ValueError(f"Option '--database' is invalid for subcommand {subcommand}")

if subcommand == AgentType.PROCESSOR_SERVER:
if not address:
raise ValueError(f"Option '--address' required for subcommand {subcommand}")
if queue:
raise ValueError(f"Option '--queue' invalid for subcommand {subcommand}")
if subcommand == AgentType.PROCESSING_WORKER:
if address:
raise ValueError(f"Option '--address' invalid for subcommand {subcommand}")
if not queue:
raise ValueError(f"Option '--queue' required for subcommand {subcommand}")
raise ValueError("Option '--database' is required for the Processing Worker")
if not queue:
raise ValueError("Option '--queue' is required for the Processing Worker")

processor = ProcessorClass(workspace=None)
if subcommand == AgentType.PROCESSING_WORKER:
processing_worker = ProcessingWorker(
rabbitmq_addr=queue,
mongodb_addr=database,
processor_name=processor.ocrd_tool['executable'],
ocrd_tool=processor.ocrd_tool,
processor_class=ProcessorClass,
)
# The RMQConsumer is initialized and a connection to the RabbitMQ is performed
processing_worker.connect_consumer()
# Start consuming from the queue with name `processor_name`
processing_worker.start_consuming()
elif subcommand == AgentType.PROCESSOR_SERVER:
# TODO: Better validate that inside the ProcessorServer itself
host, port = address.split(':')
processor_server = ProcessorServer(
mongodb_addr=database,
processor_name=processor.ocrd_tool['executable'],
processor_class=ProcessorClass,
)
processor_server.run_server(host=host, port=int(port))
else:
raise ValueError(f"Unknown network agent type, must be one of: {SUBCOMMANDS}")
processing_worker = ProcessingWorker(
rabbitmq_addr=queue,
mongodb_addr=database,
processor_name=processor.ocrd_tool['executable'],
ocrd_tool=processor.ocrd_tool,
processor_class=ProcessorClass,
)
# The RMQConsumer is initialized and a connection to the RabbitMQ is performed
processing_worker.connect_consumer()
# Start consuming from the queue with name `processor_name`
processing_worker.start_consuming()
sys.exit(0)
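For orientation, here is a minimal sketch of what the trimmed-down code path now does when the `worker` pseudo-subcommand is given; it simply mirrors `check_and_run_processing_worker` above, with `MyProcessor` (the `ProcessorClass` argument) and the two connection strings as illustrative placeholders:

from ocrd_network import ProcessingWorker

def start_worker(ProcessorClass, queue="amqp://localhost:5672", database="mongodb://localhost:27018"):
    # Instantiate without a workspace just to read the ocrd-tool metadata
    processor = ProcessorClass(workspace=None)
    worker = ProcessingWorker(
        rabbitmq_addr=queue,
        mongodb_addr=database,
        processor_name=processor.ocrd_tool['executable'],
        ocrd_tool=processor.ocrd_tool,
        processor_class=ProcessorClass,
    )
    worker.connect_consumer()   # establish the RabbitMQ connection
    worker.start_consuming()    # block, consuming from the queue named after the processor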
8 changes: 2 additions & 6 deletions src/ocrd/decorators/ocrd_cli_options.py
@@ -1,12 +1,10 @@
import click
from click import option, Path, argument
from ocrd_utils import DEFAULT_METS_BASENAME
from ocrd_network import AgentType
from .parameter_option import parameter_option, parameter_override_option
from .loglevel_option import loglevel_option
from ocrd_network import (
DatabaseParamType,
ServerAddressParamType,
QueueServerParamType
)

@@ -40,7 +38,6 @@ def cli(**kwargs):
parameter_override_option,
loglevel_option,
option('--log-filename', default=None),
option('--address', type=ServerAddressParamType()),
option('--queue', type=QueueServerParamType()),
option('--database', type=DatabaseParamType()),
option('-R', '--resolve-resource'),
@@ -50,13 +47,12 @@
option('-D', '--dump-module-dir', is_flag=True, default=False),
option('-h', '--help', is_flag=True, default=False),
option('-V', '--version', is_flag=True, default=False),
# Subcommand, only used for 'worker'/'server'. Cannot be handled in
# Subcommand, only used for 'worker'. Cannot be handled in
# click because processors use the @command decorator and even if they
were using `group`, you cannot combine a command with
# subcommands. So we have to work around that by creating a
# pseudo-subcommand handled in ocrd_cli_wrap_processor
argument('subcommand', nargs=1, required=False,
type=click.Choice(list(map(str, AgentType)))),
argument('subcommand', nargs=1, required=False, type=click.Choice(["worker"])),
]
for param in params:
param(f)
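For context, these decorator options end up on an ordinary processor CLI roughly like this (a sketch; `MyProcessor` and its import path are placeholders, while `ocrd_cli_options` and `ocrd_cli_wrap_processor` are the functions touched in this PR):

import click
from ocrd.decorators import ocrd_cli_options, ocrd_cli_wrap_processor
from my_processor_package import MyProcessor  # placeholder processor class

@click.command()
@ocrd_cli_options
def cli(*args, **kwargs):
    # With this PR, the optional trailing SUBCOMMAND argument only accepts "worker",
    # and --queue/--database are the only network-related options left.
    return ocrd_cli_wrap_processor(MyProcessor, *args, **kwargs)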
19 changes: 6 additions & 13 deletions src/ocrd/lib.bash
@@ -183,30 +183,23 @@ ocrd__parse_argv () {
-V|--version) ocrd ocrd-tool "$OCRD_TOOL_JSON" version; exit ;;
--queue) ocrd__worker_queue="$2" ; shift ;;
--database) ocrd__worker_database="$2" ; shift ;;
--address) ocrd__worker_address="$2" ; shift ;;
*) ocrd__raise "Unknown option '$1'" ;;
esac
shift
done

if [ -v ocrd__worker_queue -o -v ocrd__worker_database -o -v ocrd__subcommand -o -v ocrd__worker_address ]; then
if [ -v ocrd__worker_queue -o -v ocrd__worker_database -o -v ocrd__subcommand ]; then
if ! [ -v ocrd__subcommand ] ; then
ocrd__raise "Provide subcommand 'worker' or 'server' for Processing Worker / Processor Server"
ocrd__raise "Provide subcommand 'worker' for Processing Worker"
elif ! [ -v ocrd__worker_database ]; then
ocrd__raise "For the Processing Worker / Processor Server --database is required"
ocrd__raise "For the Processing Worker --database is required"
elif ! [ -v ocrd__worker_queue ]; then
ocrd__raise "For the Processing Worker --queue is required"
fi
if [ ${ocrd__subcommand} = "worker" ]; then
if ! [ -v ocrd__worker_queue ]; then
ocrd__raise "For the Processing Worker --queue is required"
fi
ocrd network processing-worker $OCRD_TOOL_NAME --queue "${ocrd__worker_queue}" --database "${ocrd__worker_database}"
elif [ ${ocrd__subcommand} = "server" ]; then
if ! [ -v ocrd__worker_address ]; then
ocrd__raise "For the Processor Server --address is required"
fi
ocrd network processor-server $OCRD_TOOL_NAME --database "${ocrd__worker_database}" --address "${ocrd__worker_address}"
else
ocrd__raise "subcommand must be either 'worker' or 'server' not '${ocrd__subcommand}'"
ocrd__raise "subcommand must be 'worker' not '${ocrd__subcommand}'"
fi
exit
fi
20 changes: 3 additions & 17 deletions src/ocrd/processor/base.py
@@ -555,9 +555,9 @@
self._base_logger.debug("stopped executor %s after %d tasks", str(executor), len(tasks) if tasks else -1)
if max_workers > 1:
# can cause deadlock:
#log_listener.stop()

[GitHub Actions (flake8) annotation: E265 "block comment should start with '# '" flagged on line 558 of src/ocrd/processor/base.py across all build jobs]
# not much better:
#log_listener.enqueue_sentinel()

[GitHub Actions (flake8) annotation: E265 "block comment should start with '# '" flagged on line 560 of src/ocrd/processor/base.py across all build jobs]
pass

except NotImplementedError:
@@ -640,7 +640,7 @@
self._base_logger.error(repr(e))
self._base_logger.warning(f"failed downloading file {input_file} for page {page_id}")
# process page
#executor.submit(self.process_page_file, *input_files)

[GitHub Actions (flake8) annotation: E265 "block comment should start with '# '" flagged on line 643 of src/ocrd/processor/base.py across all build jobs]
return executor.submit(_page_worker, max_seconds, *input_files), page_id, input_files

def process_workspace_handle_tasks(self, tasks: Dict[TFuture, Tuple[str, List[Optional[OcrdFileType]]]]) -> Tuple[
@@ -915,7 +915,7 @@
value=self.version),
LabelType(type_='ocrd/core',
value=OCRD_VERSION)])
])

[GitHub Actions (flake8) annotation: E124 "closing bracket does not match visual indentation" flagged on line 918 of src/ocrd/processor/base.py across all build jobs]
metadata_obj.add_MetadataItem(metadata_item)

def resolve_resource(self, val):
@@ -1193,7 +1193,7 @@
if hasattr(file, 'pageId')), "")
pool = ThreadPool(processes=1)
try:
#_page_worker_processor.process_page_file(*input_files)

[GitHub Actions (flake8) annotation: E265 "block comment should start with '# '" flagged on line 1196 of src/ocrd/processor/base.py across all build jobs]
async_result = pool.apply_async(_page_worker_processor.process_page_file, input_files)
async_result.get(timeout or None)
_page_worker_processor.logger.debug("page worker completed for page %s", page_id)
@@ -1209,7 +1209,7 @@
ocrd_tool (dict): this processor's ``tools`` section of the module's ``ocrd-tool.json``
processor_instance (object, optional): the processor implementation
(for adding any module/class/function docstrings)
subcommand (string): 'worker' or 'server'
subcommand (string, optional): 'worker'
"""
doc_help = ''
if processor_instance:
@@ -1235,7 +1235,6 @@
preserve_paragraphs=True)
subcommands = '''\
worker Start a processing worker rather than do local processing
server Start a processor server rather than do local processing
'''

processing_worker_options = '''\
@@ -1250,8 +1249,6 @@
'''

processing_server_options = '''\
--address The Processor server address in format
"{host}:{port}"
--database The MongoDB server address in format
"mongodb://{host}:{port}"
[mongodb://localhost:27018]
@@ -1296,8 +1293,8 @@
parameter_help = ' NONE\n'
else:
def wrap(s):
return wrap_text(s, initial_indent=' '*3,
subsequent_indent=' '*4,
return wrap_text(s, initial_indent=' ' * 3,
subsequent_indent=' ' * 4,
width=72, preserve_paragraphs=True)
for param_name, param in ocrd_tool['parameters'].items():
parameter_help += wrap('"%s" [%s%s]' % (
@@ -1335,17 +1332,6 @@

Options:
{processing_worker_options}
'''
elif subcommand == 'server':
return f'''\
Usage: {ocrd_tool['executable']} server [OPTIONS]

Run {ocrd_tool['executable']} as a processor sever.

{ocrd_tool['description']}{doc_help}

Options:
{processing_server_options}
'''
else:
pass
3 changes: 1 addition & 2 deletions src/ocrd/processor/helpers.py
@@ -66,8 +66,7 @@
when a match occurs - as long as the program is being run. They only get deleted (and
their resources freed) when as many as :py:data:`~ocrd_utils.config.OCRD_MAX_PROCESSOR_CACHE`
instances have already been cached while this particular parameter set was re-used
least frequently. (See :py:class:`~ocrd_network.ProcessingWorker` and
:py:class:`~ocrd_network.ProcessorServer` for use-cases.)
least frequently. (See :py:class:`~ocrd_network.ProcessingWorker` for use-cases.)

Args:
processorClass (object): Python class of the module processor.
@@ -226,7 +225,7 @@


# not decorated here but at runtime (on first use)
#@freeze_args

[GitHub Actions (flake8) annotation: E265 "block comment should start with '# '" flagged on line 228 of src/ocrd/processor/helpers.py across all build jobs]
#@lru_cache(maxsize=config.OCRD_MAX_PROCESSOR_CACHE)
def get_cached_processor(parameter: dict, processor_class):
"""
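The commented-out decorators above are applied at runtime (on first use) rather than at import time; a rough sketch of that pattern follows, with the helper name `enable_processor_cache` and the fallback maxsize being illustrations rather than the actual code:

from functools import lru_cache

def enable_processor_cache(maxsize=128):
    # Rebind the module-level function to a frozen-args, LRU-cached version only
    # once caching is actually wanted; freeze_args is the decorator referenced in
    # the '#@freeze_args' comment above, assumed to live in the same module.
    global get_cached_processor
    get_cached_processor = freeze_args(lru_cache(maxsize=maxsize)(get_cached_processor))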
3 changes: 1 addition & 2 deletions src/ocrd_network/__init__.py
@@ -1,7 +1,6 @@
from .client import Client
from .constants import AgentType, JobState
from .constants import JobState
from .processing_server import ProcessingServer
from .processing_worker import ProcessingWorker
from .processor_server import ProcessorServer
from .param_validators import DatabaseParamType, ServerAddressParamType, QueueServerParamType
from .server_cache import CacheLockedPages, CacheProcessingRequests
2 changes: 0 additions & 2 deletions src/ocrd_network/cli/__init__.py
@@ -1,11 +1,9 @@
from .client import client_cli
from .processing_server import processing_server_cli
from .processing_worker import processing_worker_cli
from .processor_server import processor_server_cli

__all__ = [
'client_cli',
'processing_server_cli',
'processing_worker_cli',
'processor_server_cli'
]
7 changes: 1 addition & 6 deletions src/ocrd_network/cli/client.py
@@ -49,7 +49,7 @@ def discovery_cli():
@click.option('--address', type=URL, help=ADDRESS_HELP)
def check_deployed_processors(address: Optional[str]):
"""
Get a list of deployed processing workers/processor servers.
Get a list of deployed processing workers.
Each processor is shown only once regardless of the amount of deployed instances.
"""
client = Client(server_addr_processing=address)
@@ -113,7 +113,6 @@ def check_processing_job_status(address: Optional[str], processing_job_id: str):
@parameter_override_option
@click.option('--result-queue-name')
@click.option('--callback-url')
@click.option('--agent-type', default='worker')
@click.option('-b', '--block', default=False, is_flag=True,
help='If set, the client will block till job timeout, fail or success.')
@click.option('-p', '--print-state', default=False, is_flag=True,
@@ -129,9 +128,6 @@ def send_processing_job_request(
parameter_override: List[Tuple[str, str]],
result_queue_name: Optional[str],
callback_url: Optional[str],
# TODO: This is temporally available to toggle
# between the ProcessingWorker/ProcessorServer
agent_type: Optional[str],
block: Optional[bool],
print_state: Optional[bool]
):
@@ -142,7 +138,6 @@
"path_to_mets": mets,
"description": "OCR-D Network client request",
"input_file_grps": input_file_grp.split(','),
"agent_type": agent_type
}
if output_file_grp:
req_params["output_file_grps"] = output_file_grp.split(',')
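With `--agent-type` gone, the request body assembled by `send_processing_job_request` reduces to roughly the following (values are illustrative; the remaining optional keys still come from the other CLI options shown above):

req_params = {
    "path_to_mets": "/data/workspace/mets.xml",     # from --mets (illustrative path)
    "description": "OCR-D Network client request",
    "input_file_grps": "OCR-D-IMG".split(','),      # from --input-file-grp
    # optional, only added when the corresponding option is set:
    # "output_file_grps": output_file_grp.split(','),
    # plus page_id, parameters, result_queue_name, callback_url as applicable
}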
3 changes: 1 addition & 2 deletions src/ocrd_network/cli/processing_server.py
@@ -12,8 +12,7 @@
def processing_server_cli(path_to_config, address: str):
"""
Start the Processing Server
(proxy between the user and the
Processing Worker(s) / Processor Server(s))
(proxy between the user and the Processing Worker(s))
"""

# Note, the address is already validated with the type field
31 changes: 0 additions & 31 deletions src/ocrd_network/cli/processor_server.py

This file was deleted.

8 changes: 1 addition & 7 deletions src/ocrd_network/constants.py
@@ -16,11 +16,6 @@ def __str__(self):
return self.value


class AgentType(StrEnum):
PROCESSING_WORKER = "worker"
PROCESSOR_SERVER = "server"


class DeployType(StrEnum):
# Deployed by the Processing Server config file
DOCKER = "docker"
@@ -40,7 +35,7 @@ class JobState(StrEnum):
failed = "FAILED"
# The processing job is queued inside the RabbitMQ
queued = "QUEUED"
# Processing job is currently running in a Worker or Processor Server
# Processing job is currently running on a Worker
running = "RUNNING"
# Processing job finished successfully
success = "SUCCESS"
@@ -53,7 +48,6 @@ class NetworkLoggingDirs(StrEnum):
PROCESSING_JOBS = "processing_jobs"
PROCESSING_SERVERS = "processing_servers"
PROCESSING_WORKERS = "processing_workers"
PROCESSOR_SERVERS = "processor_servers"


class ServerApiTags(StrEnum):
9 changes: 2 additions & 7 deletions src/ocrd_network/logging_utils.py
@@ -2,7 +2,7 @@
from pathlib import Path

from ocrd_utils import config, LOG_FORMAT, safe_filename
from .constants import AgentType, NetworkLoggingDirs
from .constants import NetworkLoggingDirs


def configure_file_handler_with_formatter(logger: Logger, log_file: Path, mode: str = "a") -> None:
@@ -54,10 +54,5 @@ def get_processing_server_logging_file_path(pid: int) -> Path:


def get_processing_worker_logging_file_path(processor_name: str, pid: int) -> Path:
log_file: str = f"{AgentType.PROCESSING_WORKER}.{pid}.{processor_name}.log"
log_file: str = f"worker.{pid}.{processor_name}.log"
return Path(get_root_logging_dir(NetworkLoggingDirs.PROCESSING_WORKERS), log_file)


def get_processor_server_logging_file_path(processor_name: str, pid: int) -> Path:
log_file: str = f"{AgentType.PROCESSOR_SERVER}.{pid}.{processor_name}.log"
return Path(get_root_logging_dir(NetworkLoggingDirs.PROCESSOR_SERVERS), log_file)
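With the hard-coded `worker.` prefix, the worker log file path now resolves to something like the following (processor name and PID are examples; the root directory depends on the logging configuration):

from ocrd_network.logging_utils import get_processing_worker_logging_file_path

log_path = get_processing_worker_logging_file_path(processor_name="ocrd-dummy", pid=4242)
# e.g. <root>/processing_workers/worker.4242.ocrd-dummy.log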