New Upstream Release - python-pbcommand

Ready changes

Summary

Merged new upstream version: 2.1.1+git20230613.724bfb6 (was: 2.1.1+git20220929.4045c8c).

Resulting package

Built on 2023-06-25T13:28 (took 9m25s)

The resulting binary packages can be installed (if you have the apt repository enabled) by running:

apt install -t fresh-releases python3-pbcommand

Lintian Result

Diff

diff --git a/debian/changelog b/debian/changelog
index 89ff307..221dae1 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,10 @@
+python-pbcommand (2.1.1+git20230613.724bfb6-1) UNRELEASED; urgency=low
+
+  * New upstream snapshot.
+  * New upstream snapshot.
+
+ -- Debian Janitor <janitor@jelmer.uk>  Sun, 25 Jun 2023 13:19:44 -0000
+
 python-pbcommand (2.1.1+git20220616.3f2e6c2-2) unstable; urgency=medium
 
   * Team upload.
diff --git a/debian/patches/fix_version.patch b/debian/patches/fix_version.patch
index 75ec55e..46ce465 100644
--- a/debian/patches/fix_version.patch
+++ b/debian/patches/fix_version.patch
@@ -5,8 +5,10 @@ Description: For some reason pkg_resources.get_distribution('pbcommand') fails
  .
 FIXME: Needs to be adapted for new upstream versions
 
---- a/pbcommand/__init__.py
-+++ b/pbcommand/__init__.py
+Index: python-pbcommand.git/pbcommand/__init__.py
+===================================================================
+--- python-pbcommand.git.orig/pbcommand/__init__.py
++++ python-pbcommand.git/pbcommand/__init__.py
 @@ -4,7 +4,7 @@ import sys
  try:
      __VERSION__ = pkg_resources.get_distribution('pbcommand').version
diff --git a/debian/patches/python3.patch b/debian/patches/python3.patch
index 5d0bf68..17ee651 100644
--- a/debian/patches/python3.patch
+++ b/debian/patches/python3.patch
@@ -2,40 +2,50 @@ Author: Andreas Tille <tille@debian.org>
 Last-Update: Sat, 07 Dec 2019 21:44:15 +0100
 Description: Force Python3 in some scripts
 
---- a/bin/extract-readme-snippets.py
-+++ b/bin/extract-readme-snippets.py
+Index: python-pbcommand.git/bin/extract-readme-snippets.py
+===================================================================
+--- python-pbcommand.git.orig/bin/extract-readme-snippets.py
++++ python-pbcommand.git/bin/extract-readme-snippets.py
 @@ -1,4 +1,4 @@
 -#!/usr/bin/env python
 +#!/usr/bin/python3
  
  """
  Pandoc filter to exact python code blocks and write each snippet out.
---- a/pbcommand/create_bundle_manifest.py
-+++ b/pbcommand/create_bundle_manifest.py
+Index: python-pbcommand.git/pbcommand/create_bundle_manifest.py
+===================================================================
+--- python-pbcommand.git.orig/pbcommand/create_bundle_manifest.py
++++ python-pbcommand.git/pbcommand/create_bundle_manifest.py
 @@ -1,4 +1,4 @@
 -#!/usr/bin/env python
 +#!/usr/bin/python3
  # Tool to generate the manifest.xml will the correct datetime of bundle
  # creation as well as add git sha and bamboo build metadata
  
---- a/pbcommand/services/resolver.py
-+++ b/pbcommand/services/resolver.py
+Index: python-pbcommand.git/pbcommand/services/resolver.py
+===================================================================
+--- python-pbcommand.git.orig/pbcommand/services/resolver.py
++++ python-pbcommand.git/pbcommand/services/resolver.py
 @@ -1,4 +1,4 @@
 -#!/usr/bin/env python
 +#!/usr/bin/python3
  
  """
  Utility to obtain paths to important analysis files from SMRT Link jobs,
---- a/pbcommand/testkit/nunit.py
-+++ b/pbcommand/testkit/nunit.py
+Index: python-pbcommand.git/pbcommand/testkit/nunit.py
+===================================================================
+--- python-pbcommand.git.orig/pbcommand/testkit/nunit.py
++++ python-pbcommand.git/pbcommand/testkit/nunit.py
 @@ -1,4 +1,4 @@
 -#!/usr/bin/env python
 +#!/usr/bin/python3
  
  """
  Generate an NUnit XML test report annotated with test issue keys, suitable
---- a/tests/test_testkit_xunit.py
-+++ b/tests/test_testkit_xunit.py
+Index: python-pbcommand.git/tests/test_testkit_xunit.py
+===================================================================
+--- python-pbcommand.git.orig/tests/test_testkit_xunit.py
++++ python-pbcommand.git/tests/test_testkit_xunit.py
 @@ -101,7 +101,7 @@ class TestXunitOutput:
      def test_merge_junit_files_cmdline(self):
          x1, x2 = self._get_junit_files()
diff --git a/pbcommand/models/common.py b/pbcommand/models/common.py
index 2652dce..8f53297 100644
--- a/pbcommand/models/common.py
+++ b/pbcommand/models/common.py
@@ -375,6 +375,7 @@ class FileTypes:
         "bw",
         MimeTypes.BINARY)
     CSV = FileType(to_file_ns('csv'), "file", "csv", MimeTypes.CSV)
+    TSV = FileType(to_file_ns('tsv'), "file", "tsv", MimeTypes.TXT)
     XML = FileType(to_file_ns('xml'), "file", "xml", MimeTypes.XML)
     HTML = FileType(to_file_ns('html'), "file", "html", MimeTypes.HTML)
     # Generic Json File
diff --git a/pbcommand/models/legacy.py b/pbcommand/models/legacy.py
index 22be5ab..d54419d 100644
--- a/pbcommand/models/legacy.py
+++ b/pbcommand/models/legacy.py
@@ -41,6 +41,27 @@ class Pipeline:
     def optional_inputs(self):
         return [o for o in self.entry_points if o.optional]
 
+    def to_d(self):
+        return {
+            "id": self.idx,
+            "name": self.display_name,
+            "description": self.description,
+            "entryPoints": self.entry_points,
+            "options": [],  # engine_options,
+            "taskOptions": self.task_options,
+            "tags": self.tags,
+            "schemaVersion": "2.0.0",
+            "_comment": "Automatically generated by pbcommand",
+            "version": self.version
+        }
+
+    def to_json(self):
+        return json.dumps(self.to_d(), indent=2)
+
+    def write_json(self, file_name):
+        with open(file_name, "wt") as json_out:
+            json_out.write(self.to_json())
+
     def __repr__(self):
         # Only communicate the entry id
         ek = [e["entryId"] for e in self.entry_points]
diff --git a/pbcommand/models/report.py b/pbcommand/models/report.py
index 660e913..6a6fceb 100644
--- a/pbcommand/models/report.py
+++ b/pbcommand/models/report.py
@@ -111,7 +111,7 @@ class BaseReportElement(metaclass=abc.ABCMeta):
         :param id_: (int) id of child BaseReportElement
         """
         if id_ in self._ids:
-            msg = "a plot with id '{i}' has already been added to {t}.".format(
+            msg = "an element with id '{i}' has already been added to {t}.".format(
                 i=id_, t=str(type(self)))
             log.error(msg)
             raise PbReportError(msg)
@@ -716,7 +716,7 @@ class Report(BaseReportElement):
         """
         Add a table to the report
         """
-        BaseReportElement.is_unique(self, table.id)
+        #BaseReportElement.is_unique(self, table.id)
         self._tables.append(table)
 
     def __repr__(self):
@@ -975,13 +975,14 @@ DATA_TYPES = {
 class AttributeSpec:
 
     def __init__(self, id_, name, description, type_, format_=None,
-                 is_hidden=False):
+                 is_hidden=False, scale=None):
         self.id = id_
         self.name = name
         self.description = description
         self._type = type_
         self.format_str = format_
         self.is_hidden = is_hidden
+        self.scale = scale
 
     @property
     def type(self):
@@ -995,7 +996,7 @@ class AttributeSpec:
         assert d["type"] in DATA_TYPES, d["type"]
         return AttributeSpec(d['id'].split(".")[-1], d['name'],
                              d['description'], d["type"], format_str,
-                             d.get("isHidden", False))
+                             d.get("isHidden", False), d.get("scale", None))
 
     def validate_attribute(self, attr):
         assert attr.id == self.id
@@ -1004,17 +1005,24 @@ class AttributeSpec:
                 i=self.id, v=type(attr.value).__name__, t=self.type)
             raise TypeError(msg)
 
+    def is_fractional_percentage(self):
+        if self.format_str:
+            m = validate_format(self.format_str)
+            return m.groups()[0] == 'p'
+        return False
+
 
 class ColumnSpec:
 
     def __init__(self, id_, header, description, type_, format_=None,
-                 is_hidden=False):
+                 is_hidden=False, scale=None):
         self.id = id_
         self.header = header
         self.description = description
         self._type = type_
         self.format_str = format
         self.is_hidden = is_hidden
+        self.scale = scale
 
     @property
     def type(self):
@@ -1028,7 +1036,7 @@ class ColumnSpec:
         assert d["type"] in DATA_TYPES, d["type"]
         return ColumnSpec(d['id'].split(".")[-1], d['header'],
                           d['description'], d["type"], format_str,
-                          d.get("isHidden", False))
+                          d.get("isHidden", False), d.get("scale", None))
 
     def validate_column(self, col):
         assert col.id == self.id
diff --git a/pbcommand/schemas/report_spec.avsc b/pbcommand/schemas/report_spec.avsc
index 38b4b4d..acaa54c 100644
--- a/pbcommand/schemas/report_spec.avsc
+++ b/pbcommand/schemas/report_spec.avsc
@@ -71,6 +71,12 @@
               "type": ["boolean", "null"],
               "default": null,
               "doc": "Flag to hide this attribute"
+            },
+            {
+              "name": "scale",
+              "type": ["int", "float", "null"],
+              "default": null,
+              "doc": "Scale factor to be multiplied times numerical values"
             }
           ]
         }
@@ -147,6 +153,12 @@
                       "type": ["boolean", "null"],
                       "default": null,
                       "doc": "Flag to hide this column"
+                    },
+                    {
+                      "name": "scale",
+                      "type": ["int", "float", "null"],
+                      "default": null,
+                      "doc": "Scale factor to be multiplied times numerical values"
                     }
                   ]
                 }
diff --git a/pbcommand/services/_service_access_layer.py b/pbcommand/services/_service_access_layer.py
index 77e5de1..5a46854 100644
--- a/pbcommand/services/_service_access_layer.py
+++ b/pbcommand/services/_service_access_layer.py
@@ -956,74 +956,101 @@ class ServiceAccessLayer:  # pragma: no cover
         return _process_rget(_to_url(self.uri, "{p}/{i}".format(
             i=pipeline_template_id, p=ServiceAccessLayer.ROOT_PT)), headers=self._get_headers())
 
-    def create_by_pipeline_template_id(self,
-                                       name,
-                                       pipeline_template_id,
-                                       epoints,
-                                       task_options=(),
-                                       workflow_options=(),
-                                       tags=()):
-        """Creates and runs an analysis workflow by pipeline template id
+    def get_pipeline_presets(self):
+        return _process_rget(_to_url(self.uri, "/smrt-link/workflow-presets"),
+                             headers=self._get_headers())
 
+    def get_pipeline_preset(self, preset_id):
+        presets = self.get_pipeline_presets()
+        by_id = {p["presetId"]: p for p in presets}
+        by_shortid = {p["presetId"].split(".")[-1]: p for p in presets}
+        by_name = {p["name"]: p for p in presets}
+        return by_id.get(preset_id,
+                         by_name.get(preset_id,
+                                     by_shortid.get(preset_id, None)))
+
+    def create_by_pipeline_template_id(self, *args, **kwds):
+        return self.create_analysis_job(*args, **kwds)
+
+    def create_analysis_job(self,
+                            name,
+                            pipeline_id,
+                            epoints,
+                            task_options=(),
+                            workflow_options=(),
+                            tags=(),
+                            preset_id=None,
+                            description=None,
+                            project_id=1):
+        """Creates and runs an analysis workflow by workflow ID
 
         :param tags: Tags should be a set of strings
         """
-        if pipeline_template_id.startswith("pbsmrtpipe"):
+        if pipeline_id.startswith("pbsmrtpipe"):
             raise NotImplementedError("pbsmrtpipe is no longer supported")
 
         # sanity checking to see if pipeline is valid
-        _ = self.get_pipeline_template_by_id(pipeline_template_id)
+        _ = self.get_pipeline_template_by_id(pipeline_id)
 
-        seps = [
-            dict(
-                entryId=e.entry_id,
-                fileTypeId=e.dataset_type,
-                datasetId=e.resource) for e in epoints]
+        service_eps = [dict(entryId=e.entry_id,
+                            fileTypeId=e.dataset_type,
+                            datasetId=e.resource) for e in epoints]
 
         def _to_o(opt_id, opt_value, option_type_id):
-            return dict(optionId=opt_id, value=opt_value,
+            return dict(optionId=opt_id,
+                        value=opt_value,
                         optionTypeId=option_type_id)
 
         task_options = list(task_options)
         d = dict(name=name,
-                 pipelineId=pipeline_template_id,
-                 entryPoints=seps,
+                 pipelineId=pipeline_id,
+                 entryPoints=service_eps,
                  taskOptions=task_options,
-                 workflowOptions=workflow_options)
+                 workflowOptions=workflow_options,
+                 projectId=project_id)
+        if description:
+            d["description"] = description
+        if preset_id:
+            preset = self.get_pipeline_preset(preset_id)
+            if preset is None:
+                raise KeyError(f"Can't find a compute config for '{preset_id}'")
+            d["presetId"] = preset["presetId"]
 
         # Only add the request if the non empty.
         if tags:
             tags_str = ",".join(list(tags))
             d['tags'] = tags_str
         job_type = JobTypes.ANALYSIS
-        raw_d = _process_rpost(_to_url(self.uri,
-                                       "{r}/{p}".format(p=job_type,
-                                                        r=ServiceAccessLayer.ROOT_JOBS)),
+        path = "{r}/{p}".format(p=job_type, r=ServiceAccessLayer.ROOT_JOBS)
+        raw_d = _process_rpost(_to_url(self.uri, path),
                                d,
                                headers=self._get_headers())
         return ServiceJob.from_d(raw_d)
 
-    def run_by_pipeline_template_id(self,
-                                    name,
-                                    pipeline_template_id,
-                                    epoints,
-                                    task_options=(),
-                                    workflow_options=(),
-                                    time_out=JOB_DEFAULT_TIMEOUT,
-                                    tags=(),
-                                    abort_on_interrupt=True,
-                                    retry_on_failure=False):
+    def run_by_pipeline_template_id(self, *args, **kwds):
+        return self.run_analysis_job(*args, **kwds)
+
+    def run_analysis_job(self,
+                         name,
+                         pipeline_id,
+                         epoints,
+                         task_options=(),
+                         workflow_options=(),
+                         time_out=JOB_DEFAULT_TIMEOUT,
+                         tags=(),
+                         abort_on_interrupt=True,
+                         retry_on_failure=False):
         """Blocks and runs a job with a timeout"""
 
-        job_or_error = self.create_by_pipeline_template_id(
+        job_or_error = self.create_analysis_job(
             name,
-            pipeline_template_id,
+            pipeline_id,
             epoints,
             task_options=task_options,
             workflow_options=workflow_options,
             tags=tags)
 
-        _d = dict(name=name, p=pipeline_template_id, eps=epoints)
+        _d = dict(name=name, p=pipeline_id, eps=epoints)
         custom_err_msg = "Job {n} args: {a}".format(n=name, a=_d)
 
         job_id = _job_id_or_error(job_or_error, custom_err_msg=custom_err_msg)
@@ -1252,7 +1279,8 @@ class SmrtLinkAuthClient(ServiceAccessLayer):  # pragma: no cover
     def _get_headers(self):
         return {
             "Authorization": "Bearer {}".format(self.auth_token),
-            "Content-type": "application/json"
+            "Content-type": "application/json",
+            "X-User-ID": self._user
         }
 
     def _to_base_url(self, h):
diff --git a/setup.py b/setup.py
index 7499d89..d994b7f 100644
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,7 @@ test_deps = [
 
 setup(
     name='pbcommand',
-    version='2.4.0',
+    version='2.4.4',
     author='Pacific Biosciences',
     author_email='devnet@pacificbiosciences.com',
     description='Library and Tools for interfacing with PacBio® data CLI tools',

Debdiff

[The following lists of changes regard files as different if they have different names, permissions or owners.]

Files in second set of .debs but not in first

-rw-r--r--  root/root   /usr/lib/python3/dist-packages/pbcommand-2.4.4.egg-info/PKG-INFO
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/pbcommand-2.4.4.egg-info/dependency_links.txt
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/pbcommand-2.4.4.egg-info/not-zip-safe
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/pbcommand-2.4.4.egg-info/requires.txt
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/pbcommand-2.4.4.egg-info/top_level.txt

Files in first set of .debs but not in second

-rw-r--r--  root/root   /usr/lib/python3/dist-packages/pbcommand-2.4.0.egg-info/PKG-INFO
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/pbcommand-2.4.0.egg-info/dependency_links.txt
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/pbcommand-2.4.0.egg-info/not-zip-safe
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/pbcommand-2.4.0.egg-info/requires.txt
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/pbcommand-2.4.0.egg-info/top_level.txt

No differences were encountered in the control files

More details

Full run details