Commit e9f10f52 authored by Martin Hamant

print() to logger

parent 6c0bd158
Pipeline #16763 passed with stages in 44 seconds
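For context on what the replacements below rely on: the diff assumes a module-level `logger` object is already configured elsewhere in the script. A minimal sketch of such a setup (the handler, format string, and level here are assumptions, not taken from this repository):

```python
import logging

# Hypothetical configuration; the real script may set this up differently.
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(name)s: %(message)s'))
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)

# Unlike print(), each call now carries a severity level, so output can be
# filtered or redirected by the logging configuration:
logger.error("JSON decode error")
logger.debug('updating group cache for %s', 'example-namespace')
```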
@@ -85,7 +85,7 @@ def record_orphan(entry):
except OSError as e:
logger.error('cannot update file: {}'.format(e))
except json.decoder.JSONDecodeError:
print("JSON decode error")
logger.error("JSON decode error")
def cache_gitlab_group(namespace):
@@ -100,7 +100,7 @@ def cache_gitlab_group(namespace):
cacheGroup['meta'] = gitlabGroupMeta.json()
return True
except requests.exceptions.HTTPError as e:
print("There was a problem in getting group's metadata : {}".format(e))
logger.error("There was a problem in getting group's metadata : {}".format(e))
cacheGroup.clear()
return False
@@ -120,12 +120,12 @@ def cache_gitlab_group(namespace):
cacheGroup['projects'].extend([p for p in gitlabGroup.json()])
return True
except requests.exceptions.HTTPError as e:
print("There was a problem in getting group's projects : {}".format(e))
logger.error("There was a problem in getting group's projects : {}".format(e))
cacheGroup.clear()
return False
def do_update(namespace):
-print('updating group cache')
+logger.debug(f'updating group cache for {namespace}')
cacheGroup.clear()
# first we collect the group metadata only
if not get_meta(namespace):
@@ -161,7 +161,7 @@ def add_project_to_gitlab(namespace, repoName, repoUrl, issuesEnabledFor=False):
The destination namespace should exists beforehand
"""
# print("invoke add project {}/{} ...".format(namespace, repoName))
logger.debug("invoke add project {}/{} ...".format(namespace, repoName))
# processing only if the cache is populated with the group data
if cache_gitlab_group(namespace):
@@ -169,14 +169,14 @@ def add_project_to_gitlab(namespace, repoName, repoUrl, issuesEnabledFor=False):
# we don't process if the project already exists in gitlab
for project in cacheGroup['projects']:
if project['name'] == repoName:
-# print('project "{}" already exists in GitLab group "{}"'.format(
-# repoName, namespace))
+logger.debug('project "{}" already exists in GitLab group "{}"'.format(
+repoName, namespace))
return False
# unsure if the group is intended for mirroring
# We don't want to mix up human maintained repo with mirrored ones
if(cacheGroup['meta']['description'] == 'type: mirror'):
-print('would create ' + namespace + '/'
-+ repoName + ' and description ' + repoUrl)
+logger.info('would create ' + namespace + '/'
++ repoName + ' and description ' + repoUrl)
payload = {
'namespace_id': cacheGroup['meta']['id'],
@@ -203,18 +203,18 @@ def add_project_to_gitlab(namespace, repoName, repoUrl, issuesEnabledFor=False):
}
if config.args.dryrun:
-print('dryrun. The following actions would be made:')
-print('GitLab POST payload {}'.format(payload))
+logger.info('dryrun. The following actions would be made:')
+logger.info('GitLab POST payload {}'.format(payload))
else:
print("creating project {}/{} ...".format(namespace, repoName))
logger.info("creating project {}/{} ...".format(namespace, repoName))
newrepo = requests.post(
config.globalCfg['gitlabApiUrl'] + '/projects', params=payload, headers=gitlabHeaders)
if not newrepo.status_code == 201: #  201 is "created" in GitLab API
-pp.pprint(newrepo.json())
-print('Error from GitLab (repo "{}/{}"): {}'.format(namespace,
-repoName, newrepo.json()['message']))
-print(
+logger.debug(pp.pformat(newrepo.json()))
+logger.error('Error from GitLab (repo "{}/{}"): {}'.format(namespace,
+repoName, newrepo.json()['message']))
+logger.error(
'in case of error \"namespace is not valid\" check if the gitlab user is a member of the target group')
# tweak issue tracker permissions if needed
@@ -224,13 +224,13 @@ def add_project_to_gitlab(namespace, repoName, repoUrl, issuesEnabledFor=False):
config.globalCfg['gitlabApiUrl'] + f"/projects/{newrepo.json()['id']}",
params={'issues_access_level': 'private'}, headers=gitlabHeaders)
if not tweakrepo.status_code == 200:
-print('Error from GitLab (repo "{}/{}"): {}'.format(namespace,
-repoName, tweakrepo.json()['message']))
+logger.error('Error from GitLab (repo "{}/{}"): {}'.format(namespace,
+repoName, tweakrepo.json()['message']))
else:
raise GitlabGroupError(
'group {} description is not configured for mirroring'.format(namespace))
else:
print("Can't update group cache for {}".format(namespace))
logger.error("Can't update group cache for {}".format(namespace))
# print(repos.text)
@@ -272,9 +272,9 @@ def is_valid_source(repo, validRepoTypes):
def print_github_limits(headers):
if 'X-RateLimit-Remaining' in headers:
if int(headers['X-RateLimit-Remaining']) < 5:
-print(red('warning ! reaching GitHub API limits'))
+logger.warning('warning ! reaching GitHub API limits')
-print(
+logger.warning(
'GitHub\'s X-RateLimit-Remaining : {}'.format(headers['X-RateLimit-Remaining']))
@@ -290,7 +290,7 @@ def prepare_repos(doGetSize=False, **itemSettings):
# get a single repo, providing a full name aka namespace/reponame
# warning : a single repo can be any of type : fork, archive
# Endpoint is like https://api.github.com/repos/shumatech/BOSSA
print("single repo")
logger.info("single repo sync")
# TODO factorize github api calls some day
_ghRepo = '{}/{}'.format(itemSettings['ghNs'], itemSettings['ghRepo'])
repo = sessionGihubApi.get(
@@ -306,7 +306,8 @@ def prepare_repos(doGetSize=False, **itemSettings):
add_project_to_gitlab(dstGroup, repo['name'], repo['clone_url'],
issuesEnabledFor=itemSettings['issuesEnabledFor'])
elif repo.status_code == requests.codes.not_found:
-print('GitHub repo {} has not been found (404)\nReason: {}'.format(_ghRepo, repo.text))
+logger.warning(
+'GitHub repo {} has not been found (404)\nReason: {}'.format(_ghRepo, repo.text))
# Is it only the repo that got deleted or the whole github org ?
_ghOrg = sessionGihubApi.get(
@@ -323,7 +324,7 @@ def prepare_repos(doGetSize=False, **itemSettings):
# TODO : Do something to delete the repos from GitLab, if exists
# TODO : Because if it exists in GitLab but no more in GitHub, it means its been deleted
else:
-print('unhandled github HTTP return code')
+logger.error('unhandled github HTTP return code')
else:
# a whole namespace
@@ -349,7 +350,7 @@ def prepare_repos(doGetSize=False, **itemSettings):
repos.links['next']['url'], timeout=5)
if repos.status_code != requests.codes.ok:
-print(repos.text)
+logger.critical(f"can't get next page from GitHub API : {repos.text}")
exit()
for repo in repos.json():
@@ -358,10 +359,10 @@ def prepare_repos(doGetSize=False, **itemSettings):
if not itemSettings['ghRepoOnlyList'] or (itemSettings['ghRepoOnlyList'] and repo['name'] in itemSettings['ghRepoOnlyList']):
reposSize += repo['size']
print("Considering {} repositories in this github org".format(
logger.info("Considering {} repositories in this github org".format(
len(ghRepos)))
if itemSettings['ghRepoOnlyList']:
print("only {} of them will be sync'ed".format(
logger.info("only {} of them will be sync'ed".format(
len(itemSettings['ghRepoOnlyList'])))
if cache_gitlab_group(dstGroup):
@@ -369,11 +370,11 @@ def prepare_repos(doGetSize=False, **itemSettings):
# we sort out if the configured repos are still in GitHub ('only' list)
if itemSettings['ghRepoOnlyList']:
if set(itemSettings['ghRepoOnlyList']) <= set(pInGitLab):
print("all configured repos are in Github")
logger.info("all configured repos are in Github")
_orphans = set(pInGitLab) - set(itemSettings['ghRepoOnlyList'])
if _orphans:
-print(
+logger.warning(
'Warning, "ONLY" list: some of the existing GitLab repos in this group'
' are not specified in the source configuration. See orphan log.')
_pApiUrls = {p['path_with_namespace']: '{}/projects/{}'.format(config.globalCfg['gitlabApiUrl'], p['id']) for p in cacheGroup['projects']
@@ -385,10 +386,10 @@ def prepare_repos(doGetSize=False, **itemSettings):
# we print out repos that are in GitLab but not in GitHub (anymore)
_reposNotInGH = list(set(pInGitLab) - set(ghRepos.keys()))
if _reposNotInGH:
-print(
+logger.warning(
"Warning: some of the GitLab repos doesn't exist or have special"
"status in GitHub org (archive, fork). See orphan log.")
print("Consider deleting them from GitLab")
logger.warning("Consider deleting them from GitLab")
_pApiUrls = {p['path_with_namespace']: '{}/projects/{}'.format(config.globalCfg['gitlabApiUrl'], p['id']) for p in cacheGroup['projects']
for orphan in _reposNotInGH if orphan == p['name']}
record_orphan(_pApiUrls)
@@ -409,7 +410,7 @@ def prepare_repos(doGetSize=False, **itemSettings):
if cache_gitlab_group(dstGroup):
# GitLab group exists
print("existing GitLab group '{}' (id: {}) doesn't have a valid matching GitHub org source (currently {})".format(
logger.warning("existing GitLab group '{}' (id: {}) doesn't have a valid matching GitHub org source (currently {})".format(
cacheGroup['meta']['name'], cacheGroup['meta']['id'], itemSettings['ghNs']))
raise(GithubError(
......