From 6441da4d6ce12a5b02f5ae470a9399045d1bde8a Mon Sep 17 00:00:00 2001
From: Graham
Date: Tue, 15 Oct 2019 23:49:23 +0200
Subject: [PATCH] 1.0.4 - bring back wait_for_id for local DPs

---
 CHANGELOG.md                     |   7 +-
 JSSImporter.py                   | 111 +++++++++++++++++--------------
 pkg/jssimporter/build-info.plist |   2 +-
 version.plist                    |   2 +-
 4 files changed, 69 insertions(+), 53 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index de64bcb..97615ec 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,6 +10,11 @@ All notable changes to this project will be documented in this file. This projec
 
 - The above efforts to improve package upload reliability may conversely cause problems on setups with multiple DPs of different types. Scenarios involving Cloud plus Local DPs are not yet tested, and there probably needs to be a more intelligent method of treating each DP as a separate package upload process than currently exists.
 
+## [1.0.4] - 2019-10-15 - 1.0.4
+
+This is a bugfix release to improve performance for Jamf Cloud customers using a local DP as the Master.
+
+
 ## [1.0.3] - 2019-10-04 - 1.0.3
 
 This is a bugfix release to address Issue #165 - local distribution points failing to upload new packages due to failing to obtain a package ID. The package ID check has been removed from local DPs, but left for cloud DPs. Tested and now working on JCDS and SMB DPs.
@@ -17,7 +22,7 @@
 
 ## [1.0.2] - 2019-09-25 - 1.0.2
 
-This is the official 1.0.2 release, exactly the same as the former 1.0.2b8.
+This is the official 1.0.2 release, exactly the same as the former 1.0.2b8. I don't intend to use beta version numbering for bugfixes in the future.
 
 - @grahamrpugh added a new `wait_for_id` definition, which provides a common method to check for feedback on the upload of each API object, in an attempt to reduce the chance of cloud clusters returning conflicting information about whether an object has been successfully uploaded or not.
 - Verbosity is increased with respect to reporting object IDs.
diff --git a/JSSImporter.py b/JSSImporter.py
index 59147f6..5402542 100644
--- a/JSSImporter.py
+++ b/JSSImporter.py
@@ -38,7 +38,7 @@
 
 __all__ = ["JSSImporter"]
 
-__version__ = "1.0.3"
+__version__ = "1.0.4"
 
 REQUIRED_PYTHON_JSS_VERSION = StrictVersion("2.0.1")
 
@@ -365,27 +365,6 @@ def repo_type(self):
                 return
         return repo
 
-    def wait_for_id(self, obj_cls, obj_name):
-        """wait for feedback that the object is there"""
-        object = None
-        search_method = getattr(self.jss, obj_cls.__name__)
-        # limit time to wait to get a package ID.
-        timeout = time.time() + 120
-        while time.time() < timeout:
-            try:
-                object = search_method(obj_name)
-                if object.id != 0:
-                    self.output("{} ID '{}' verified on server".format(obj_cls.__name__, object.id))
-                    self.upload_needed = True
-                    return object
-                else:
-                    self.output("Waiting to get {} ID from server (reported: {})...".format(obj_cls.__name__,
-                                                                                             object.id))
-                    time.sleep(10)
-            except jss.GetError:
-                self.output("Waiting to get {} ID from server (none reported)...".format(obj_cls.__name__))
-                time.sleep(10)
-
     def handle_category(self, category_type, category_name=None):
         """Ensure a category is present."""
         if self.env.get(category_type):
@@ -406,8 +385,8 @@ def handle_category(self, category_type, category_name=None):
             try:
                 category.id
                 self.output(
-                    "Category, type '{}', name '{}', created.".format(category_type,
-                                                                      category_name))
+                    "Category, type '{}', name '{}', created (ID: {}).".format(category_type,
+                                                                               category_name, category.id))
                 self.env["jss_changed_objects"]["jss_category_added"].append(
                     category_name)
             except ValueError:
@@ -416,6 +395,28 @@
                 category = None
         return category
 
+    def wait_for_id(self, obj_cls, obj_name):
+        """Wait for feedback that the object exists on the server."""
+        object = None
+        search_method = getattr(self.jss, obj_cls.__name__)
+        # limit the time to wait for the object ID
+        timeout = time.time() + 60
+        while time.time() < timeout:
+            try:
+                object = search_method(obj_name)
+                if int(object.id) != 0:
+                    # self.output("{} ID '{}' verified on server".format(obj_cls.__name__, object.id))
+                    self.upload_needed = True
+                    return object
+                else:
+                    self.output("Waiting to get {} ID from server (reported: {})"
+                                "...".format(obj_cls.__name__, object.id))
+                    time.sleep(10)
+            except jss.GetError:
+                self.output("Waiting to get {} ID from server (none reported)"
+                            "...".format(obj_cls.__name__))
+                time.sleep(10)
+
     def handle_package(self):
         """Creates or updates, and copies a package object.
@@ -469,33 +470,43 @@ def handle_package(self):
             self.copy(pkg_path)
             package = self.wait_for_id(jss.Package, self.pkg_name)
             try:
-                package.id
+                self.output("First pass (cloud): Package object ID: {}".format(package.id))
                 pkg_update = (self.env["jss_changed_objects"]["jss_package_added"])
             except ValueError:
                 raise ProcessorError("Failed to get Package ID from {}.".format(self.repo_type()))
-        elif self.repo_type() == "DP" or self.repo_type() == "SMB" or self.repo_type() == "AFP" or self.repo_type() == "Local":
-            # for AFP/SMB shares, we create the package object first and then copy the package
-            # if it is not already there
-            self.output("Creating Package object...")
+        elif (self.repo_type() == "DP" or self.repo_type() == "SMB" or
+              self.repo_type() == "AFP" or self.repo_type() == "Local"):
+            # For local DPs, check whether the package is already on the distribution point and upload it if not
+            if self.jss.distribution_points.exists(os.path.basename(pkg_path)):
+                self.output("Package '{}' found, so copy to {} repo not required. "
+                            "(Delete package from repo and re-run recipe if you need to "
+                            "update it).".format(self.pkg_name, self.repo_type()))
+                self.upload_needed = False
+            else:
+                self.copy(pkg_path)
+                self.output("Package '{}' copied to {} repo.".format(self.pkg_name, self.repo_type()))
+                self.upload_needed = True
+
+            # next we create the package object as it is not already there
+            self.output("Creating Package object for '{}'...".format(self.pkg_name))
             package = jss.Package(self.jss, self.pkg_name)
+            package.save()
             pkg_update = (self.env["jss_changed_objects"]["jss_package_added"])
+            # wait until the new package object reports an ID on the server
+            package = self.wait_for_id(jss.Package, self.pkg_name)
+            try:
+                self.output("First pass (local): Package object ID: {}".format(package.id))
+                pkg_update = (self.env["jss_changed_objects"]["jss_package_added"])
+            except ValueError:
+                raise ProcessorError("Failed to get Package ID from {}.".format(self.repo_type()))
         else:
             # repo type that is not supported
             raise ProcessorError(
-                "JSSImporter can't upload the Package at '{}'! Repo type {} is not supported. Please reconfigure your JSSImporter prefs.".format(pkg_path, self.repo_type()))
-
-        # For local DPs we check that the package is already on the distribution point and upload it if not
-        if self.repo_type() == "DP" or self.repo_type() == "SMB" or self.repo_type() == "AFP" or self.repo_type() == "Local":
-            if self.jss.distribution_points.exists(os.path.basename(pkg_path)):
-                self.output("Package upload not required.")
-                self.upload_needed = False
-            else:
-                self.copy(pkg_path)
-                self.output("Package {} uploaded to distribution point.".format(self.pkg_name))
-                self.upload_needed = True
+                "JSSImporter can't upload the Package at '{}'! Repo type {} is not supported. "
+                "Please reconfigure your JSSImporter prefs.".format(pkg_path, self.repo_type()))
 
-        # only update the package object if an uploand ad was carried out
+        # only update the package object if an upload was carried out
         if (self.env["STOP_IF_NO_JSS_UPLOAD"] is True and
                 not self.upload_needed):
             self.output("Not overwriting policy as upload requirement is determined as {} "
@@ -518,17 +529,17 @@
         package_boot_volume_required = self.env.get(
            "package_boot_volume_required")
 
+        self.wait_for_id(jss.Package, self.pkg_name)
+        try:
+            self.output("Second pass: Package object ID: {}".format(package.id))
+            pkg_update = (self.env["jss_changed_objects"]["jss_package_added"])
+        except ValueError:
+            raise ProcessorError("Failed to get Package ID from {}.".format(self.repo_type()))
+
         if self.category is not None:
             cat_name = self.category.name
         else:
             cat_name = ""
-        if self.repo_type() == "JDS" or self.repo_type() == "CDP" or self.repo_type() == "AWS":
-            self.wait_for_id(jss.Package, self.pkg_name)
-            try:
-                package.id
-                pkg_update = (self.env["jss_changed_objects"]["jss_package_added"])
-            except ValueError:
-                raise ProcessorError("Failed to get Package ID from {}.".format(self.repo_type()))
 
         self.update_object(cat_name, package, "category", pkg_update)
         self.update_object(os_requirements, package, "os_requirements", pkg_update)
@@ -854,7 +865,7 @@ def update_or_create_new(self, obj_cls, template_path, name="",
             object.id
             # Retrieve the updated XML.
             recipe_object = search_method(name)
-            self.output("{} '{}' updated.".format(obj_cls.__name__, name))
+            self.output("{} '{}' updated (ID: {}).".format(obj_cls.__name__, name, object.id))
             if update_env:
                 self.env["jss_changed_objects"][update_env].append(name)
         except ValueError:
@@ -867,7 +878,7 @@ def update_or_create_new(self, obj_cls, template_path, name="",
             object = self.wait_for_id(obj_cls, name)
             try:
                 object.id
-                self.output("{} '{}' created.".format(obj_cls.__name__, name))
+                self.output("{} '{}' created (ID: {}).".format(obj_cls.__name__, name, object.id))
                 if added_env:
                     self.env["jss_changed_objects"][added_env].append(name)
             except ValueError:
diff --git a/pkg/jssimporter/build-info.plist b/pkg/jssimporter/build-info.plist
index 448d076..5a54ff1 100644
--- a/pkg/jssimporter/build-info.plist
+++ b/pkg/jssimporter/build-info.plist
@@ -17,6 +17,6 @@
 	<key>suppress_bundle_relocation</key>
 	<true/>
 	<key>version</key>
-	<string>1.0.3</string>
+	<string>1.0.4</string>
 </dict>
 </plist>
diff --git a/version.plist b/version.plist
index 767fd59..822b7fb 100644
--- a/version.plist
+++ b/version.plist
@@ -3,6 +3,6 @@
 <plist version="1.0">
 <dict>
 	<key>Version</key>
-	<string>1.0.3</string>
+	<string>1.0.4</string>
 </dict>
 </plist>