diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..b2a608d
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,5 @@
+[flake8]
+exclude =
+    .git,
+    __pycache__,
+max-line-length = 115
diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml
new file mode 100644
index 0000000..9035d06
--- /dev/null
+++ b/.github/workflows/linting.yml
@@ -0,0 +1,14 @@
+name: pre-commit
+
+on:
+  pull_request:
+  push:
+    branches: [main]
+
+jobs:
+  pre-commit:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
+    - uses: pre-commit/action@v2.0.3
diff --git a/.isort.cfg b/.isort.cfg
new file mode 100644
index 0000000..b9fb3f3
--- /dev/null
+++ b/.isort.cfg
@@ -0,0 +1,2 @@
+[settings]
+profile=black
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..3b085b9
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,30 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+  rev: v4.0.1
+  hooks:
+  - id: trailing-whitespace
+  - id: end-of-file-fixer
+  - id: check-ast
+  - id: check-case-conflict
+  - id: check-merge-conflict
+  - id: check-symlinks
+  - id: check-yaml
+  - id: debug-statements
+
+- repo: https://gitlab.com/pycqa/flake8
+  rev: 4.0.1
+  hooks:
+  - id: flake8
+    args: ["--ignore=E203,W503"]
+
+- repo: https://github.com/timothycrosley/isort
+  rev: 5.9.3
+  hooks:
+  - id: isort
+
+- repo: https://github.com/psf/black
+  rev: 22.3.0
+  hooks:
+  - id: black
diff --git a/LICENSE b/LICENSE
index a6393ed..3c99b84 100644
--- a/LICENSE
+++ b/LICENSE
@@ -27,4 +27,3 @@ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
diff --git a/README.md b/README.md
index 0e92c9d..1102ad6 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,3 @@
 # Workflows
 
 Repository of workflows for the FXI beamline.
-
diff --git a/end_of_run_workflow.py b/end_of_run_workflow.py
index 56a96fd..3d71406 100644
--- a/end_of_run_workflow.py
+++ b/end_of_run_workflow.py
@@ -1,5 +1,5 @@
 import prefect
-from prefect import task, Flow, Parameter
+from prefect import Flow, Parameter, task
 from prefect.tasks.prefect import create_flow_run
 
 
@@ -15,12 +15,9 @@ def log_completion():
     validation_flow = create_flow_run(
         flow_name="general-data-validation",
         project_name="FXI",
-        parameters={"beamline_acronym": "fxi" ,"uid": uid}
+        parameters={"beamline_acronym": "fxi", "uid": uid},
     )
     export_flow = create_flow_run(
-        flow_name="export",
-        project_name="FXI",
-        parameters={"uid": uid}
+        flow_name="export", project_name="FXI", parameters={"uid": uid}
     )
     log_completion(upstream_tasks=[validation_flow, export_flow])
-
diff --git a/export.py b/export.py
index d863a3c..edeb9fc 100644
--- a/export.py
+++ b/export.py
@@ -1,14 +1,14 @@
-import databroker
 import datetime
+import os
+from pathlib import Path
+
+import databroker
 import h5py
 import numpy as np
-import os
 import pandas as pd
 import prefect
-
-from pathlib import Path
 from PIL import Image
-from prefect import task, Flow, Parameter
+from prefect import Flow, Parameter, task
 
 
 @task
@@ -69,9 +69,9 @@ def get_img(run, det="Andor", sli=[]):
     "Take in a Header and return a numpy array of detA1 image(s)."
     det_name = f"{det}_image"
     if len(sli) == 2:
-        img = np.array(list(run['primary']['data'][det_name])[sli[0] : sli[1]])
+        img = np.array(list(run["primary"]["data"][det_name])[sli[0] : sli[1]])
     else:
-        img = np.array(list(run['primary']['data'][det_name]))
+        img = np.array(list(run["primary"]["data"][det_name]))
     return np.squeeze(img)
 
 
@@ -92,17 +92,17 @@ def bin_ndarray(ndarray, new_shape=None, operation="mean"):
      [262 270 278 286 294]
      [342 350 358 366 374]]
     """
-    if new_shape == None:
+    if new_shape is None:
         s = np.array(ndarray.shape)
         s1 = np.int32(s / 2)
         new_shape = tuple(s1)
     operation = operation.lower()
-    if not operation in ["sum", "mean"]:
+    if operation not in ["sum", "mean"]:
        raise ValueError("Operation not supported.")
     if ndarray.ndim != len(new_shape):
         raise ValueError("Shape mismatch: {} -> {}".format(ndarray.shape, new_shape))
     compression_pairs = [(d, c // d) for d, c in zip(new_shape, ndarray.shape)]
-    flattened = [l for p in compression_pairs for l in p]
+    flattened = [ell for p in compression_pairs for ell in p]
     ndarray = ndarray.reshape(flattened)
     for i in range(len(new_shape)):
         op = getattr(ndarray, operation)
@@ -727,7 +727,8 @@ def export_raster_2D_2(run, binning=4, filepath="", **kwargs):
                 y_list[j] * pix * img_sizeY / 1000,
             ]
             pos_file.append(
-                f"{x_list[i]:3.0f}\t{y_list[j]:3.0f}\t{x_list[i]*pix*img_sizeX/1000:3.3f}\t\t{y_list[j]*pix*img_sizeY/1000:3.3f}\n"
+                f"{x_list[i]:3.0f}\t{y_list[j]:3.0f}\t{x_list[i]*pix*img_sizeX/1000:3.3f}\t\t"
+                + f"{y_list[j]*pix*img_sizeY/1000:3.3f}\n"
             )
             index = index + 1
     s = img_patch.shape
@@ -804,7 +805,8 @@ def export_raster_2D(run, binning=4, filepath="", **kwargs):
                 y_list[j] * pix * img_sizeY / 1000,
             ]
             pos_file.append(
-                f"{x_list[i]:3.0f}\t{y_list[j]:3.0f}\t{x_list[i]*pix*img_sizeX/1000:3.3f}\t\t{y_list[j]*pix*img_sizeY/1000:3.3f}\n"
+                f"{x_list[i]:3.0f}\t{y_list[j]:3.0f}\t{x_list[i]*pix*img_sizeX/1000:3.3f}\t\t"
+                + f"{y_list[j]*pix*img_sizeY/1000:3.3f}\n"
             )
             index = index + 1
     s = img_patch.shape
diff --git a/load_scan.py b/load_scan.py
deleted file mode 100644
index ecc556d..0000000
--- a/load_scan.py
+++ /dev/null
@@ -1,64 +0,0 @@
-from datetime import datetime
-import numpy as np
-import pandas as pd
-from databroker.assets.handlers import AreaDetectorHDF5TimestampHandler
-
-
-EPICS_EPOCH = datetime(1990, 1, 1, 0, 0)
-
-
-def convert_AD_timestamps(ts):
-    return pd.to_datetime(ts, unit="s", origin=EPICS_EPOCH, utc=True).dt.tz_convert(
-        "US/Eastern"
-    )
-
-def get_tomo_images(input_dict):
-    pos = input_dict['pos']
-    imgs = input_dict['imgs']
-    chunked_timestamps = input_dict['chunked_timestamps']
-    mot_pos = input_dict['mot_pos']
-
-    raw_timestamps = []
-    for chunk in chunked_timestamps:
-        raw_timestamps.extend(chunk.tolist())
-
-    timestamps = convert_AD_timestamps(pd.Series(raw_timestamps))
-    pos["time"] = pos["time"].dt.tz_localize("US/Eastern")
-
-    img_day, img_hour = (
-        timestamps.dt.day,
-        timestamps.dt.hour,
-    )
-    img_min, img_sec, img_msec = (
-        timestamps.dt.minute,
-        timestamps.dt.second,
-        timestamps.dt.microsecond,
-    )
-    img_time = (
-        img_day * 86400 + img_hour * 3600 + img_min * 60 + img_sec + img_msec * 1e-6
-    )
-    img_time = np.array(img_time)
-
-    mot_day, mot_hour = (
-        pos["time"].dt.day,
-        pos["time"].dt.hour,
-    )
-    mot_min, mot_sec, mot_msec = (
-        pos["time"].dt.minute,
-        pos["time"].dt.second,
-        pos["time"].dt.microsecond,
-    )
-    mot_time = (
-        mot_day * 86400 + mot_hour * 3600 + mot_min * 60 + mot_sec + mot_msec * 1e-6
-    )
-    mot_time = np.array(mot_time)
-
-    offset = np.min([np.min(img_time), np.min(mot_time)])
-    img_time -= offset
-    mot_time -= offset
-    mot_pos_interp = np.interp(img_time, mot_time, mot_pos)
-
-    pos2 = mot_pos_interp.argmax() + 1
-    img_angle = mot_pos_interp[: pos2 - chunk_size]  # rotation angles
-    img_tomo = imgs[: pos2 - chunk_size]  # tomo images
-    return img_tomo, img_angle
diff --git a/print_scanid.py b/print_scanid.py
index 548beb0..94e22e8 100644
--- a/print_scanid.py
+++ b/print_scanid.py
@@ -1,18 +1,20 @@
 import databroker
 import prefect
-from prefect import task, Flow
+from prefect import Flow, task
+
 
 @task
 def print_scanid():
     client = databroker.from_profile("nsls2", username=None)
-    scan_id = client['fxi'][-1].start['scan_id']
+    scan_id = client["fxi"][-1].start["scan_id"]
     logger = prefect.context.get("logger")
     logger.info(f"Scan ID: {scan_id}")
 
+
 with Flow("scan_id") as flow:
     print_scanid()
 
-#flow.register(project_name='TST',
+# flow.register(project_name='TST',
 #               labels=['fxi-2022-2.2'],
 #               add_default_labels=False,
 #               set_schedule_active=False)
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..481f86a
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,9 @@
+[tool.black]
+line-length = 88
+exclude = '''
+(
+  /(
+    | \.git
+  )/
+)
+'''
diff --git a/tasks.py b/tasks.py
index 6373331..68d100a 100644
--- a/tasks.py
+++ b/tasks.py
@@ -1,10 +1,12 @@
-import prefect
-from prefect import task, Flow, Parameter
-from tiled.client import from_profile
 from datetime import datetime
+
 import numpy as np
 import pandas as pd
-from databroker.assets.handlers import AreaDetectorHDF5TimestampHandler
+import prefect
+import tomopy
+from prefect import Flow, Parameter, task
+from scipy.interpolate import interp1d
+from tiled.client import from_profile
 
 
 def timestamp_to_float(t):
@@ -13,11 +15,12 @@ def timestamp_to_float(t):
         tf.append(ts)
     return np.array(tf)
 
+
 def get_fly_scan_angle(input_dict):
     timestamp_tomo = input_dict["timestamp_tomo"]
     pos = input_dict["pos"]
     mot_pos = input_dict["mot_pos"]
-    
+
     timestamp_mot = timestamp_to_float(pos["time"])
 
     img_ini_timestamp = timestamp_tomo[0][0]
@@ -25,7 +28,7 @@
         1
     ]  # timestamp_mot[1] is the time when taking dark image
 
-    print(f'timestamp_tomo: {timestamp_tomo} img_ini_timestamp: {img_ini_timestamp}')
+    print(f"timestamp_tomo: {timestamp_tomo} img_ini_timestamp: {img_ini_timestamp}")
 
     tomo_time = timestamp_tomo[0] - img_ini_timestamp
     mot_time = timestamp_mot - mot_ini_timestamp
@@ -34,6 +37,7 @@
     img_angle = mot_pos_interp
     return img_angle
 
+
 @task(log_stdout=True)
 def call_find_rot(uid):
     c = from_profile("nsls2", username=None)
@@ -45,19 +49,18 @@
     # sanity check: make sure we remembered the right stream name
     assert "zps_pi_r_monitor" in scan_result
     pos = scan_result["zps_pi_r_monitor"]["data"]
-    logger.info('extracting data from tiled')
+    logger.info("extracting data from tiled")
 
     imgs = np.array(list(scan_result["primary"]["data"]["Andor_image"]))
     s1 = imgs.shape
-    chunk_size = s1[1]
     imgs = imgs.reshape(-1, s1[2], s1[3])
-    logger.info('done with primary images')
+    logger.info("done with primary images")
 
     # load darks and bkgs
     img_dark = np.array(list(scan_result["dark"]["data"]["Andor_image"]))[0]
-    logger.info('done with darks')
+    logger.info("done with darks")
     img_bkg = np.array(list(scan_result["flat"]["data"]["Andor_image"]))[0]
-    logger.info('done with background')
+    logger.info("done with background")
 
     img_dark_avg = np.mean(img_dark, axis=0, keepdims=True)
     img_bkg_avg = np.mean(img_bkg, axis=0, keepdims=True)
@@ -65,16 +68,15 @@ def call_find_rot(uid):
 
     mot_pos = np.array(pos["zps_pi_r"])
 
-    input_dict = {'pos': pos,
-                  'timestamp_tomo': chunked_timestamps,
-                  'mot_pos': mot_pos}
+    input_dict = {"pos": pos, "timestamp_tomo": chunked_timestamps, "mot_pos": mot_pos}
     img_tomo = np.array(list(scan_result["primary"]["data"]["Andor_image"]))[0]
     logger.info(img_tomo)
     img_angle = get_fly_scan_angle(input_dict)
-    logger.info('calculating rotation center')
+    logger.info("calculating rotation center")
     img, cen = rotcen_test2(img_tomo, img_bkg_avg, img_dark_avg, img_angle)
     return img, cen
 
+
 with Flow("test-find-rot") as flow1:
     uid = Parameter("uid")
     call_find_rot(uid)
@@ -87,66 +89,12 @@ def convert_AD_timestamps(ts):
         "US/Eastern"
     )
 
-def get_tomo_images(input_dict):
-    pos = input_dict['pos']
-    imgs = input_dict['imgs']
-    chunked_timestamps = input_dict['chunked_timestamps']
-    mot_pos = input_dict['mot_pos']
-
-    raw_timestamps = []
-    for chunk in chunked_timestamps:
-        raw_timestamps.extend(chunk.tolist())
-
-    timestamps = convert_AD_timestamps(pd.Series(raw_timestamps))
-    pos["time"] = pos["time"].dt.tz_localize("US/Eastern")
-
-    img_day, img_hour = (
-        timestamps.dt.day,
-        timestamps.dt.hour,
-    )
-    img_min, img_sec, img_msec = (
-        timestamps.dt.minute,
-        timestamps.dt.second,
-        timestamps.dt.microsecond,
-    )
-    img_time = (
-        img_day * 86400 + img_hour * 3600 + img_min * 60 + img_sec + img_msec * 1e-6
-    )
-    img_time = np.array(img_time)
-
-    mot_day, mot_hour = (
-        pos["time"].dt.day,
-        pos["time"].dt.hour,
-    )
-    mot_min, mot_sec, mot_msec = (
-        pos["time"].dt.minute,
-        pos["time"].dt.second,
-        pos["time"].dt.microsecond,
-    )
-    mot_time = (
-        mot_day * 86400 + mot_hour * 3600 + mot_min * 60 + mot_sec + mot_msec * 1e-6
-    )
-    mot_time = np.array(mot_time)
-
-    offset = np.min([np.min(img_time), np.min(mot_time)])
-    img_time -= offset
-    mot_time -= offset
-    mot_pos_interp = np.interp(img_time, mot_time, mot_pos)
-
-    pos2 = mot_pos_interp.argmax() + 1
-    img_angle = mot_pos_interp[: pos2 - chunk_size]  # rotation angles
-    img_tomo = imgs[: pos2 - chunk_size]  # tomo images
-    return img_tomo, img_angle
-
-import numpy as np
-import tomopy
-from scipy.interpolate import interp1d
-
 
 def find_nearest(data, value):
     data = np.array(data)
     return np.abs(data - value).argmin()
 
+
 def rotcen_test2(
     img_tomo,
     img_bkg_avg,
@@ -162,19 +110,19 @@ def rotcen_test2(
     txm_normed_flag=0,
     denoise_flag=0,
     fw_level=9,
-    algorithm='gridrec',
+    algorithm="gridrec",
     n_iter=5,
     circ_mask_ratio=0.95,
     options={},
     atten=None,
     clim=[],
     dark_scale=1,
-    filter_name='None',
+    filter_name="None",
 ):
-    print('beginning of rotcen2')
-    s = [1, data.shape[0], data.shape[1]]
+    print("beginning of rotcen2")
+    s = [1, img_tomo.shape[0], img_tomo.shape[1]]
 
-    if not atten is None:
+    if atten is not None:
         ref_ang = atten[:, 0]
         ref_atten = atten[:, 1]
         fint = interp1d(ref_ang, ref_atten)
@@ -200,7 +148,7 @@ def rotcen_test2(
         img_bkg = np.array(img_bkg_avg[:, sli_exp[0] : sli_exp[1], :])
         img_dark = np.array(img_dark_avg[:, sli_exp[0] : sli_exp[1], :]) / dark_scale
         prj = (img_tomo - img_dark) / (img_bkg - img_dark)
-        if not atten is None:
+        if atten is not None:
             for i in range(len(tomo_angle)):
                 att = fint(tomo_angle[i])
                 prj[i] = prj[i] / att
@@ -213,7 +161,7 @@ def rotcen_test2(
 
     prj_norm -= bkg_level
 
-    print('tomopy prep')
+    print("tomopy prep")
     prj_norm = tomopy.prep.stripe.remove_stripe_fw(
         prj_norm, level=fw_level, wname="db5", sigma=1, pad=True
     )
@@ -240,42 +188,42 @@ def rotcen_test2(
         allow_list = list(set(np.arange(len(prj_norm))) - set(block_list))
         prj_norm = prj_norm[allow_list]
         theta = theta[allow_list]
-    if start == None or stop == None or steps == None:
+    if start is None or stop is None or steps is None:
         start = int(s[2] / 2 - 50)
         stop = int(s[2] / 2 + 50)
         steps = 26
     cen = np.linspace(start, stop, steps)
     img = np.zeros([len(cen), s[2], s[2]])
-    print('tomopy start reconstructions')
+    print("tomopy start reconstructions")
     for i in range(len(cen)):
         if print_flag:
             print("{}: rotcen {}".format(i + 1, cen[i]))
-        if algorithm == 'gridrec':
+        if algorithm == "gridrec":
             img[i] = tomopy.recon(
-                prj_norm[:, addition_slice : addition_slice + 1],
-                theta,
-                center=cen[i],
-                algorithm="gridrec",
-                filter_name=filter_name
+                prj_norm[:, addition_slice : addition_slice + 1],
+                theta,
+                center=cen[i],
+                algorithm="gridrec",
+                filter_name=filter_name,
             )
-        elif 'astra' in algorithm:
-            img[i] = tomopy.recon(
-                prj_norm[:, addition_slice : addition_slice + 1],
-                theta,
-                center=cen[i],
-                algorithm=tomopy.astra,
-                options=options
+        elif "astra" in algorithm:
+            img[i] = tomopy.recon(
+                prj_norm[:, addition_slice : addition_slice + 1],
+                theta,
+                center=cen[i],
+                algorithm=tomopy.astra,
+                options=options,
             )
         else:
             img[i] = tomopy.recon(
-                prj_norm[:, addition_slice : addition_slice + 1],
-                theta,
-                center=cen[i],
-                algorithm=algorithm,
-                num_iter=n_iter,
-                filter_name=filter_name
+                prj_norm[:, addition_slice : addition_slice + 1],
+                theta,
+                center=cen[i],
+                algorithm=algorithm,
+                num_iter=n_iter,
+                filter_name=filter_name,
             )
-    print('tomopy circ_mask')
+    print("tomopy circ_mask")
     img = tomopy.circ_mask(img, axis=0, ratio=circ_mask_ratio)
     return img, cen
 
@@ -299,4 +247,3 @@ def denoise(prj, denoise_flag):
         prj = gf(prj, [0, 1, 1])
     return prj
 
-
diff --git a/test_tasks.py b/test_tasks.py
index c8687ed..0107ed6 100644
--- a/test_tasks.py
+++ b/test_tasks.py
@@ -1,12 +1,14 @@
 import prefect
-from prefect import task, Flow
+from prefect import Flow, task
+
 
 @task
 def print_stuff():
     logger = prefect.context.get("logger")
     logger.info("printing stuff")
 
+
 with Flow("print_stuff") as flow1:
     print_stuff()
 
-#flow1.register(project_name="TST", labels=["tst-2022-2.2"], add_default_labels=False, set_schedule_active=False)
+# flow1.register(project_name="TST", labels=["tst-2022-2.2"], add_default_labels=False, set_schedule_active=False)
diff --git a/tomo_recon_tiled.py b/tomo_recon_tiled.py
deleted file mode 100644
index 4439170..0000000
--- a/tomo_recon_tiled.py
+++ /dev/null
@@ -1,158 +0,0 @@
-import numpy as np
-import tomopy
-from scipy.interpolate import interp1d
-
-
-def find_nearest(data, value):
-    data = np.array(data)
-    return np.abs(data - value).argmin()
-
-def rotcen_test2(
-    img_tomo,
-    img_bkg_avg,
-    img_dark_avg,
-    img_angle,
-    start=None,
-    stop=None,
-    steps=None,
-    sli=0,
-    block_list=[],
-    print_flag=1,
-    bkg_level=0,
-    txm_normed_flag=0,
-    denoise_flag=0,
-    fw_level=9,
-    algorithm='gridrec',
-    n_iter=5,
-    circ_mask_ratio=0.95,
-    options={},
-    atten=None,
-    clim=[],
-    dark_scale=1,
-    filter_name='None',
-):
-    s = [1, data.shape[0], data.shape[1]]
-
-    if not atten is None:
-        ref_ang = atten[:, 0]
-        ref_atten = atten[:, 1]
-        fint = interp1d(ref_ang, ref_atten)
-
-    if denoise_flag:
-        addition_slice = 100
-    else:
-        addition_slice = 0
-
-    if sli == 0:
-        sli = int(s[1] / 2)
-    sli_exp = [
-        np.max([0, sli - addition_slice // 2]),
-        np.min([sli + addition_slice // 2 + 1, s[1]]),
-    ]
-    tomo_angle = np.arrayimg_angle
-    theta = tomo_angle / 180.0 * np.pi
-    img_tomo = np.array(img_tomo[:, sli_exp[0] : sli_exp[1], :])
-
-    if txm_normed_flag:
-        prj_norm = img_tomo
-    else:
-        img_bkg = np.array(img_bkg_avg[:, sli_exp[0] : sli_exp[1], :])
-        img_dark = np.array(img_dark_avg[:, sli_exp[0] : sli_exp[1], :]) / dark_scale
-        prj = (img_tomo - img_dark) / (img_bkg - img_dark)
-        if not atten is None:
-            for i in range(len(tomo_angle)):
-                att = fint(tomo_angle[i])
-                prj[i] = prj[i] / att
-        prj_norm = -np.log(prj)
-        f.close()
-
-    prj_norm = denoise(prj_norm, denoise_flag)
-    prj_norm[np.isnan(prj_norm)] = 0
-    prj_norm[np.isinf(prj_norm)] = 0
-    prj_norm[prj_norm < 0] = 0
-
-    prj_norm -= bkg_level
-
-    prj_norm = tomopy.prep.stripe.remove_stripe_fw(
-        prj_norm, level=fw_level, wname="db5", sigma=1, pad=True
-    )
-    """
-    if denoise_flag == 1: # denoise using wiener filter
-        ss = prj_norm.shape
-        for i in range(ss[0]):
-            prj_norm[i] = skr.wiener(prj_norm[i], psf=psf, reg=reg, balance=balance, is_real=is_real, clip=clip)
-    elif denoise_flag == 2:
-        from skimage.filters import gaussian as gf
-        prj_norm = gf(prj_norm, [0, 1, 1])
-    """
-    s = prj_norm.shape
-    if len(s) == 2:
-        prj_norm = prj_norm.reshape(s[0], 1, s[1])
-        s = prj_norm.shape
-
-    if theta[-1] > theta[1]:
-        pos = find_nearest(theta, theta[0] + np.pi)
-    else:
-        pos = find_nearest(theta, theta[0] - np.pi)
-    block_list = list(block_list) + list(np.arange(pos + 1, len(theta)))
-    if len(block_list):
-        allow_list = list(set(np.arange(len(prj_norm))) - set(block_list))
-        prj_norm = prj_norm[allow_list]
-        theta = theta[allow_list]
-    if start == None or stop == None or steps == None:
-        start = int(s[2] / 2 - 50)
-        stop = int(s[2] / 2 + 50)
-        steps = 26
-    cen = np.linspace(start, stop, steps)
-    img = np.zeros([len(cen), s[2], s[2]])
-    for i in range(len(cen)):
-        if print_flag:
-            print("{}: rotcen {}".format(i + 1, cen[i]))
-        if algorithm == 'gridrec':
-            img[i] = tomopy.recon(
-                prj_norm[:, addition_slice : addition_slice + 1],
-                theta,
-                center=cen[i],
-                algorithm="gridrec",
-                filter_name=filter_name
-            )
-        elif 'astra' in algorithm:
-            img[i] = tomopy.recon(
-                prj_norm[:, addition_slice : addition_slice + 1],
-                theta,
-                center=cen[i],
-                algorithm=tomopy.astra,
-                options=options
-            )
-        else:
-            img[i] = tomopy.recon(
-                prj_norm[:, addition_slice : addition_slice + 1],
-                theta,
-                center=cen[i],
-                algorithm=algorithm,
-                num_iter=n_iter,
-                filter_name=filter_name
-            )
-    img = tomopy.circ_mask(img, axis=0, ratio=circ_mask_ratio)
-    return img, cen
-
-
-def denoise(prj, denoise_flag):
-    if denoise_flag == 1:  # Wiener denoise
-        import skimage.restoration as skr
-
-        ss = prj.shape
-        psf = np.ones([2, 2]) / (2**2)
-        reg = None
-        balance = 0.3
-        is_real = True
-        clip = True
-        for j in range(ss[0]):
-            prj[j] = skr.wiener(
-                prj[j], psf=psf, reg=reg, balance=balance, is_real=is_real, clip=clip
-            )
-    elif denoise_flag == 2:  # Gaussian denoise
-        from skimage.filters import gaussian as gf
-
-        prj = gf(prj, [0, 1, 1])
-    return prj