Update refresh_project_copyrights.py
Added error handling when no origin and no external ID
LuckySkyWalker authored May 22, 2025
commit 5ed3923c5ee88f5984cec3c7013deecbd41e1c72
68 changes: 55 additions & 13 deletions examples/client/refresh_project_copyrights.py
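Before the diff, a quick sketch of what the commit adds: the copyrights-refresh call is now guarded so that components with no origin or no external ID are counted and skipped instead of breaking the loop, and a read timeout on the PUT is reported rather than fatal. The helper below condenses that logic; refresh_component_copyrights is a hypothetical name, not in the script, and the sketch assumes the component dicts, counters and refresh_kwargs used in the script. It catches requests.exceptions.Timeout on the assumption that bd.session is a requests session, whereas the committed code catches urllib3.exceptions.ReadTimeoutError.

import requests

def refresh_component_copyrights(bd, this_comp_data, my_statistics, refresh_kwargs):
    # Guard 1: some component versions report no external IDs at all.
    if len(this_comp_data['inputExternalIds']) > 0:
        input_external_id = this_comp_data['inputExternalIds'][0]
    else:
        my_statistics['_cntNoIDs'] += 1
        input_external_id = "n/a"

    # Guard 2: some component versions have no origin, so there is no origin URL
    # to append "/copyrights-refresh" to; count it and skip the PUT.
    if len(this_comp_data['origins']) == 0:
        my_statistics['_cntNoOrigins'] += 1
        return None

    url = this_comp_data['origins'][0]['origin'] + "/copyrights-refresh"
    try:
        response = bd.session.put(url, data=None, **refresh_kwargs)
    except requests.exceptions.Timeout:
        # The server may still complete the refresh; we only failed to confirm it.
        print('Failed to confirm copyrights refresh for %s' % input_external_id)
        response = None
    my_statistics['_cntRefresh'] += 1
    return response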
@@ -5,6 +5,7 @@
#
import http.client
from sys import api_version
import sys
import csv
import datetime
from blackduck import Client
@@ -26,6 +27,10 @@ def RepDebug(level, msg):
return True
return False

def RepWarning(msg):
    print("WARNING: " + msg)
    return True


# Parse command line arguments
parser = argparse.ArgumentParser("Refresh copyrights for project/version components")
@@ -46,14 +51,23 @@ def RepDebug(level, msg):
parser.add_argument("--debug", dest='debug', type=int, default=0, help="Debug verbosity (0=none)")

parser.add_argument("--no-verify", dest='verify', action='store_false', help="Disable TLS certificate verification")
parser.add_argument("-t", "--timeout", default=15, type=int, help="Adjust the (HTTP) session timeout value (default: 15s)")
parser.add_argument("-r", "--retries", default=3, type=int, help="Adjust the number of retries on failure (default: 3)")

args = parser.parse_args()

# open the access token file
with open(args.token_file, 'r') as tf:
    access_token = tf.readline().strip()

# access the Black Duck platform
bd = Client(base_url=args.base_url, token=access_token, verify=args.verify)
bd = Client(
    base_url=args.base_url,
    token=access_token,
    verify=args.verify,
    timeout=args.timeout,
    retries=args.retries,
)

# initialise
all_my_comp_data = []
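The new -t/--timeout and -r/--retries flags feed straight into blackduck.Client, which, as the diff shows, accepts timeout and retries keyword arguments. A minimal sketch of that wiring in isolation; make_client is an illustrative helper, not part of the script:

from blackduck import Client

def make_client(base_url, token_file, verify=True, timeout=15, retries=3):
    # Read the access token the same way the script does: first line of the token file.
    with open(token_file, 'r') as tf:
        access_token = tf.readline().strip()

    # Longer timeouts and more retries give slow copyrights-refresh calls a chance to complete.
    return Client(
        base_url=base_url,
        token=access_token,
        verify=verify,
        timeout=timeout,
        retries=retries,
    )

# e.g. bd = make_client(args.base_url, args.token_file,
#                       verify=args.verify, timeout=args.timeout, retries=args.retries)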
@@ -102,6 +116,9 @@ def RepDebug(level, msg):
my_statistics['_cntVersions'] = 0
my_statistics['_cntComponents'] = 0
my_statistics['_cntRefresh'] = 0
my_statistics['_cntNoOrigins'] = 0
my_statistics['_cntNoIDs'] = 0


# record any control values
if args.project_name:
@@ -187,25 +204,48 @@ def RepDebug(level, msg):
break

my_statistics['_cntComponents'] += 1
RepDebug(4, ' Component: %s' % this_comp_data['componentName'])

# refresh the copyrights for this component
url = this_comp_data['origins'][0]['origin']
url += "/copyrights-refresh"
RepDebug(4, ' Component: %s (%s)' %
         (this_comp_data['componentName'], this_comp_data['componentVersionName']))

if this_comp_data['inputExternalIds'].__len__() > 0:
    inputExternalIds = this_comp_data['inputExternalIds'][0]
else:
    my_statistics['_cntNoIDs'] += 1
    inputExternalIds = "n/a"
RepDebug(2, ' ID: %s' % inputExternalIds)

response = bd.session.put(url, data=None, **refresh_kwargs)
RepDebug(5,'Refresh response %s' % response)

inputExternalIds = this_comp_data['inputExternalIds'][0]
RepDebug(2, ' ID: %s' % inputExternalIds)
# refresh the copyrights for this component
if this_comp_data['origins'].__len__() > 0:
    url = this_comp_data['origins'][0]['origin']
else:
    # no origins
    RepWarning('No origin defined for [%s]' % this_comp_data['componentVersion'])
    # url = this_comp_data['componentVersion']
    url = ''

if len(url) > 0:
    # refresh end point
    url += "/copyrights-refresh"

    try:
        response = bd.session.put(url, data=None, **refresh_kwargs)
        RepDebug(5,'Refresh response %s' % response)
    except urllib3.exceptions.ReadTimeoutError:
        print('Failed to confirm copyrights refresh')

    my_statistics['_cntRefresh'] += 1
else:
    my_statistics['_cntNoOrigins'] += 1
    url = 'n/a'

my_statistics['_cntRefresh'] += 1

# if recording the data - perhaps outputting to a CSV file
if args.dump_data:
    my_data = {}
    my_data['componentName'] = this_comp_data['componentName']
    my_data['componentVersion'] = this_comp_data['componentVersionName']
    my_data['url'] = url

    if hasattr(args, 'debug') and 5 <= args.debug:
        pprint(my_data)
@@ -234,7 +274,8 @@ def RepDebug(level, msg):
with open(args.csv_file, 'w') as csv_f:
field_names = [
'Component',
'Component Version'
'Component Version',
'Url'
]

writer = csv.DictWriter(csv_f, fieldnames=field_names)
@@ -243,7 +284,8 @@ def RepDebug(level, msg):
for my_comp_data in all_my_comp_data:
row_data = {
'Component': my_comp_data['componentName'],
'Component Version': my_comp_data['componentVersion']
'Component Version': my_comp_data['componentVersion'],
'Url': my_comp_data['url']
}
writer.writerow(row_data)
else:
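With the extra Url column, the optional CSV dump records which refresh endpoint (or 'n/a') each component resolved to. A small standalone sketch of that writer, with a purely hypothetical row and output path:

import csv

field_names = ['Component', 'Component Version', 'Url']

# Hypothetical example row; the script builds these dicts in all_my_comp_data.
all_my_comp_data = [
    {'componentName': 'example-lib',
     'componentVersion': '1.2.3',
     'url': 'https://blackduck.example.com/api/components/abc/versions/def/origins/ghi/copyrights-refresh'},
]

with open('copyrights.csv', 'w', newline='') as csv_f:
    writer = csv.DictWriter(csv_f, fieldnames=field_names)
    writer.writeheader()
    for my_comp_data in all_my_comp_data:
        writer.writerow({
            'Component': my_comp_data['componentName'],
            'Component Version': my_comp_data['componentVersion'],
            'Url': my_comp_data['url'],
        })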