diff --git a/scripts/release_issue_status/auto_close.py b/scripts/release_issue_status/auto_close.py
deleted file mode 100644
index ca439398aa59..000000000000
--- a/scripts/release_issue_status/auto_close.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import datetime
-
-import requests
-from bs4 import BeautifulSoup
-
-
-def auto_close_issue(sdk_repo, item):
- issue_number, package_name = item.issue_object.number, item.package
- issue_info = sdk_repo.get_issue(number=issue_number)
- issue_author = issue_info.user.login
- last_comment = list(issue_info.get_comments())[-1]
- last_comment_date = last_comment.created_at
- last_version, last_time = get_last_released_date(package_name)
- if last_time and last_time > last_comment_date:
- comment = f'Hi @{issue_author}, pypi link: https://pypi.org/project/{package_name}/{last_version}/'
- issue_info.create_comment(body=comment)
- issue_info.edit(state='closed')
- item.labels.append('auto-closed')
- item.issue_object.set_labels(*item.labels)
- print(f"issue number:{issue_number} has been closed!")
-
-
-def get_last_released_date(package_name):
- pypi_link = f'https://pypi.org/project/{package_name}/#history'
- res = requests.get(pypi_link)
- soup = BeautifulSoup(res.text, 'html.parser')
- # find top div from
- try:
- package_info = soup.select('div[class="release-timeline"]')[0].find_all('div')[0]
- last_version_mix = package_info.find_all('p', class_="release__version")[0].contents[0]
- except IndexError as e:
- return '', ''
- last_version = last_version_mix.replace(' ', '').replace('\n', '')
- last_version_date_str = package_info.time.attrs['datetime'].split('+')[0]
- last_version_date = datetime.datetime.strptime(last_version_date_str, '%Y-%m-%dT%H:%M:%S')
- return last_version, last_version_date
diff --git a/scripts/release_issue_status/auto_pipeline_run.py b/scripts/release_issue_status/auto_pipeline_run.py
deleted file mode 100644
index ef35ee2612fe..000000000000
--- a/scripts/release_issue_status/auto_pipeline_run.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import json
-import os
-import re
-from msrest.authentication import BasicAuthentication
-from azure.devops.v6_0.pipelines.pipelines_client import PipelinesClient
-from azure.devops.v6_0.pipelines import models
-import requests
-
-
-def run_pipeline(issue_link, sdk_issue_object, pipeline_url):
- paramaters = {
- "stages_to_skip": [],
- "resources": {
- "repositories": {
- "self": {
- "refName": "refs/heads/main"
- }
- }
- },
- "variables": {
- "BASE_BRANCH": {
- "value": f"{sdk_issue_object.head.label}",
- "isSecret": False
- },
- "ISSUE_LINK": {
- "value": f"{issue_link}",
- "isSecret": False
- },
- "PIPELINE_LINK": {
- "value": f"{pipeline_url}",
- "isSecret": False
- }
- }
- }
- # Fill in with your personal access token and org URL
- personal_access_token = os.getenv('PIPELINE_TOKEN')
- organization_url = 'https://dev.azure.com/azure-sdk'
-
- # Create a connection to the org
- credentials = BasicAuthentication('', personal_access_token)
- run_parameters = models.RunPipelineParameters(**paramaters)
- client = PipelinesClient(base_url=organization_url, creds=credentials)
- result = client.run_pipeline(project='internal',pipeline_id=2500,run_parameters=run_parameters)
- if result.state == 'inProgress':
- return True
- else:
- return False
diff --git a/scripts/release_issue_status/get_python_pipeline.py b/scripts/release_issue_status/get_python_pipeline.py
deleted file mode 100644
index 5b0939a704d2..000000000000
--- a/scripts/release_issue_status/get_python_pipeline.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import os
-import re
-
-from msrest.authentication import BasicAuthentication
-from azure.devops.v6_0.pipelines.pipelines_client import PipelinesClient
-
-
-def get_python_pipelines():
- python_piplines = {}
- pipeline_client = PipelinesClient(base_url='https://dev.azure.com/azure-sdk',
- creds=BasicAuthentication('', os.getenv('PIPELINE_TOKEN')))
- pipelines = pipeline_client.list_pipelines(project='internal')
- for pipeline in pipelines:
- if re.findall('^python - \w*$', pipeline.name):
- key = pipeline.name.replace('python - ', '')
- python_piplines[key] = pipeline.id
- return python_piplines
-
-
-def get_pipeline_url(python_piplines, output_folder):
- definitionId = python_piplines.get(output_folder)
- if definitionId:
- pipeline_url = 'https://dev.azure.com/azure-sdk/internal/_build?definitionId={}'.format(definitionId)
- else:
- print('Cannot find definitionId, Do not display pipeline_url')
- pipeline_url = ''
- return pipeline_url
diff --git a/scripts/release_issue_status/main.py b/scripts/release_issue_status/main.py
index 64c5d1ac4bd5..6bc1289cfc0a 100644
--- a/scripts/release_issue_status/main.py
+++ b/scripts/release_issue_status/main.py
@@ -4,20 +4,22 @@
from datetime import date, datetime
import subprocess as sp
import traceback
+import logging
from github import Github
from azure.storage.blob import BlobClient
import reply_generator as rg
-from update_issue_body import update_issue_body, find_readme_and_output_folder
-from auto_close import auto_close_issue
-from get_python_pipeline import get_python_pipelines, get_pipeline_url
+from utils import update_issue_body, get_readme_and_output_folder, \
+ get_python_pipelines, get_pipeline_url, auto_close_issue
_NULL = ' '
_FILE_OUT = 'release_issue_status.csv'
_FILE_OUT_PYTHON = 'release_python_status.md'
_PYTHON_SDK_ADMINISTRATORS = {'msyyc', 'RAY-316', 'BigCat20196'}
+logging.basicConfig(level=logging.INFO,
+ format='[auto-reply log] - %(funcName)s[line:%(lineno)d] - %(levelname)s: %(message)s')
def my_print(cmd):
print('==' + cmd + ' ==\n')
@@ -127,46 +129,42 @@ def _latest_comment_time(comments, delay_from_create_date):
def auto_reply(item, request_repo, rest_repo, sdk_repo, duplicated_issue, python_piplines):
- print("==========new issue number: {}".format(item.issue_object.number))
- if 'Configured' in item.labels:
- item.labels.remove('Configured')
+ logging.info("new issue number: {}".format(item.issue_object.number))
if 'auto-link' not in item.labels:
- item.labels.append('auto-link')
- item.issue_object.set_labels(*item.labels)
+ item.issue_object.add_to_labels('auto-link')
try:
package_name, readme_link, output_folder = update_issue_body(request_repo, rest_repo, item.issue_object.number)
- print("pkname, readme", package_name, readme_link)
+        logging.info("pkname: %s, readme: %s", package_name, readme_link)
item.package = package_name
key = ('Python', item.package)
duplicated_issue[key] = duplicated_issue.get(key, 0) + 1
except Exception as e:
item.bot_advice = 'failed to modify the body of the new issue. Please modify manually'
- item.labels.append('attention')
- item.issue_object.set_labels(*item.labels)
- print(e)
+ item.issue_object.add_to_labels('attention')
+ logging.info(e)
raise
else:
try:
- readme_link, output_folder = find_readme_and_output_folder(request_repo, rest_repo, item.issue_object.number)
+ readme_link, output_folder = get_readme_and_output_folder(request_repo, rest_repo, item.issue_object.number)
except Exception as e:
- print('Issue: {} updates body failed'.format(item.issue_object.number))
- item.bot_advice = 'failed to find Readme link, Please check !!'
- item.labels.append('attention')
- item.issue_object.set_labels(*item.labels)
+ logging.info('Issue: {} get pkname and output folder failed'.format(item.issue_object.number))
+ item.bot_advice = 'failed to find Readme link and output folder. Please check !!'
+ item.issue_object.add_to_labels('attention')
+ logging.info(e)
raise
try:
- print("*********************")
- print(python_piplines)
+ logging.info(python_piplines)
pipeline_url = get_pipeline_url(python_piplines, output_folder)
rg.begin_reply_generate(item=item, rest_repo=rest_repo, readme_link=readme_link,
sdk_repo=sdk_repo, pipeline_url=pipeline_url)
+ if 'Configured' in item.labels:
+ item.issue_object.remove_from_labels('Configured')
except Exception as e:
item.bot_advice = 'auto reply failed, Please intervene manually !!'
- print('Error from auto reply ========================')
- print('Issue:{}'.format(item.issue_object.number))
- print(traceback.format_exc())
- print('==============================================')
+ logging.info('Error from auto reply')
+ logging.info('Issue:{}'.format(item.issue_object.number))
+ logging.info(traceback.format_exc())
def main():
@@ -220,10 +218,19 @@ def main():
# rule6: if delay from created date is over 30 days and owner never reply, close it.
# rule7: if delay from created date is over 15 days and owner never reply, remind owner to handle it.
for item in issue_status:
+ if item.language == 'Python':
+ issue_status_python.append(item)
if item.status == 'release':
item.bot_advice = 'better to release asap.'
elif (item.comment_num == 0 or 'Configured' in item.labels) and 'Python' in item.labels:
item.bot_advice = 'new issue and better to confirm quickly.'
+ if 'assigned' not in item.labels:
+ time.sleep(0.1)
+ assign_count = int(str(time.time())[-1]) % 2
+ if assign_count == 1:
+ item.issue_object.remove_from_assignees(*['RAY-316'])
+ item.issue_object.add_to_assignees(*['BigCat20196'])
+ item.issue_object.add_to_labels('assigned')
try:
auto_reply(item, request_repo, rest_repo, sdk_repo, duplicated_issue, python_piplines)
except Exception as e:
@@ -237,25 +244,20 @@ def main():
auto_close_issue(request_repo, item)
except Exception as e:
item.bot_advice = 'auto-close failed, please check!'
- print(f"=====issue: {item.issue_object.number}, {e}")
+ logging.info(f"=====issue: {item.issue_object.number}, {e}")
if item.days_from_latest_commit >= 30 and item.language == 'Python' and '30days attention' not in item.labels:
- item.labels.append('30days attention')
- item.issue_object.set_labels(*item.labels)
+ item.issue_object.add_to_labels('30days attention')
item.issue_object.create_comment(f'hi @{item.author}, the issue is closed since there is no reply for a long time. Please reopen it if necessary or create new one.')
item.issue_object.edit(state='close')
elif item.days_from_latest_commit >= 15 and item.language == 'Python' and '15days attention' not in item.labels:
item.issue_object.create_comment(f'hi @{item.author}, this release-request has been delayed more than 15 days,'
' please deal with it ASAP. We will close the issue if there is still no response after 15 days!')
- item.labels.append('15days attention')
- item.issue_object.set_labels(*item.labels)
+ item.issue_object.add_to_labels('15days attention')
# judge whether there is duplicated issue for same package
if item.package != _NULL and duplicated_issue.get((item.language, item.package)) > 1:
item.bot_advice = f'Warning:There is duplicated issue for {item.package}. ' + item.bot_advice
-
- if item.language == 'Python':
- issue_status_python.append(item)
# output result
output_python_md(issue_status_python)
diff --git a/scripts/release_issue_status/release_issue_status.yml b/scripts/release_issue_status/release_issue_status.yml
index 0e3146fdee9e..410e1952c99f 100644
--- a/scripts/release_issue_status/release_issue_status.yml
+++ b/scripts/release_issue_status/release_issue_status.yml
@@ -38,7 +38,8 @@ jobs:
export TOKEN=$(USR_TOKEN)
export HEADERS=$(PIPELINE_HEADERS)
export URL=$(PIPELINE_URL)
- export PIPELINE_TOKEN = $(PIPELINE_TOKEN)
+ export PIPELINE_TOKEN=$(USR_PIPELINE_TOKEN)
+ export COOKIE=$(USR_TOKEN)
# create virtual env
python -m venv venv-sdk
diff --git a/scripts/release_issue_status/reply_generator.py b/scripts/release_issue_status/reply_generator.py
index eddf8a9fee02..ad586e04fac7 100644
--- a/scripts/release_issue_status/reply_generator.py
+++ b/scripts/release_issue_status/reply_generator.py
@@ -1,10 +1,13 @@
-import auto_pipeline_run as apr
+from utils import run_pipeline
import re
+import logging
issue_object_rg = None
+logging.basicConfig(level=logging.INFO,
+ format='[auto-reply log] - %(funcName)s[line:%(lineno)d] - %(levelname)s: %(message)s')
-def weather_change_readme(rest_repo, link_dict, labels):
+def readme_comparison(rest_repo, link_dict, labels):
# to see whether need change readme
contents = str(rest_repo.get_contents(link_dict['readme_path']).decoded_content)
pattern_tag = re.compile(r'tag: package-[\w+-.]+')
@@ -13,6 +16,8 @@ def weather_change_readme(rest_repo, link_dict, labels):
whether_multi_api = 'multi-api' in readme_python_contents
whether_same_tag = link_dict['readme_tag'] in package_tag
whether_change_readme = not whether_same_tag or whether_multi_api and not 'MultiAPI' in labels
+ if 'Configured' in labels:
+ whether_change_readme = False
return whether_change_readme
@@ -47,55 +52,26 @@ def get_latest_pr_from_readme(rest_repo, link_dict):
return latest_pr_number_int[-1]
-def latest_pr_parse(rest_repo, latest_pr_number):
- latest_pr = rest_repo.get_issue(latest_pr_number)
- latest_pr_comments = latest_pr.get_comments()
- b = [i for i in latest_pr_comments]
- for comment in latest_pr_comments:
- if '
Swagger Generation Artifacts
' in comment.body:
- return swagger_generator_parse(comment.body, latest_pr_number)
-
-
-def swagger_generator_parse(context, latest_pr_number):
- track1_info_model = ''
- try:
- if '
azure-sdk-for-python' in context:
- pattern_python_t1 = re.compile('
azure-sdk-for-python.+?', re.DOTALL)
- python_t1 = re.search(pattern_python_t1, context).group()
- prttern_python_track1 = re.compile('
\s+?- \s+?', re.DOTALL)
- python_track1_info = re.search(prttern_python_track1, python_t1).group()
- track1_info_model = '
python-track1
{} '.format(
- python_track1_info)
- except Exception as e:
- print('track1 generate error')
- pattern_python = re.compile(' azure-sdk-for-python-track2.+?', re.DOTALL)
- python = re.search(pattern_python, context).group()
- # the way that reply not contains [Release SDK Changes]
- # pattern_python_track2 = re.compile('\s*?- \s*?', re.DOTALL)
- pattern_python_track2 = re.compile('track2_.*
', re.DOTALL)
- python_track2_info = re.search(pattern_python_track2, python).group()
- track2_info_model = ' python-track2
{} '.format(
- python_track2_info)
- pattern_sdk_changes = re.compile('/azure-sdk-for-python/pull/\d*">Release SDK Changes', re.DOTALL)
- sdk_link = re.search(pattern_sdk_changes, python_track2_info).group()
- sdk_link_number = re.search(re.compile('[0-9]+'), sdk_link).group()
- info_model = 'hi @{} Please check the package whether works well and the changelog info is as below:\n' \
- '{}\n{}\n' \
- '\n* (The version of the package is only a temporary version for testing)\n' \
- '\nhttps://github.com/Azure/azure-rest-api-specs/pull/{}\n' \
- .format(issue_object_rg.user.login, track1_info_model, track2_info_model, str(latest_pr_number))
-
- return info_model, sdk_link_number
-
-
def reply_owner(reply_content):
issue_object_rg.create_comment(reply_content)
-def add_label(label_name, labels):
- if label_name not in labels:
- labels.append(label_name)
- issue_object_rg.set_labels(*labels)
+
+def get_reply_and_sdk_number_from_readme(rest_repo, link_dict):
+ commits = rest_repo.get_commits(path=link_dict['resource_manager'])
+ latest_commit = commits[0]
+ check_run_id = latest_commit.get_check_runs(check_name='SDK azure-sdk-for-python-track2')[0].id
+ latest_pr_number = latest_commit.get_pulls()[0].number
+ details = rest_repo.get_check_run(check_run_id).output.text
+ sdk_link_number = re.findall(r'/azure-sdk-for-python/pull/(\d*)">Release SDK Changes', details)[0]
+ changelog = ' python-track2
track2_azure-mgmt-{} '.format(
+ details.split('track2_azure-mgmt-')[-1])
+ info_model = 'hi @{} Please check the package whether works well and the changelog info ' \
+ 'is as below:\n{}\n' \
+ '\n* (The version of the package is only a temporary version for testing)\n' \
+ '\nhttps://github.com/Azure/azure-rest-api-specs/pull/{}\n' \
+ .format(issue_object_rg.user.login, changelog, str(latest_pr_number))
+ return info_model, sdk_link_number
def begin_reply_generate(item, rest_repo, readme_link, sdk_repo, pipeline_url):
@@ -103,20 +79,19 @@ def begin_reply_generate(item, rest_repo, readme_link, sdk_repo, pipeline_url):
issue_object_rg = item.issue_object
link_dict = get_links(readme_link)
labels = item.labels
- whether_change_readme = weather_change_readme(rest_repo, link_dict, labels)
+ whether_change_readme = readme_comparison(rest_repo, link_dict, labels)
if not whether_change_readme:
- latest_pr_number = get_latest_pr_from_readme(rest_repo, link_dict)
- reply_content, sdk_link_number = latest_pr_parse(rest_repo, latest_pr_number)
- run_pipeline = apr.run_pipeline(issue_link=issue_object_rg.html_url,
+ reply_content, sdk_link_number = get_reply_and_sdk_number_from_readme(rest_repo, link_dict)
+ res_run = run_pipeline(issue_link=issue_object_rg.html_url,
sdk_issue_object=sdk_repo.get_pull(int(sdk_link_number)),
pipeline_url=pipeline_url
)
- if run_pipeline:
- print(f'{issue_object_rg.number} run pipeline successfully')
+ if res_run:
+ logging.info(f'{issue_object_rg.number} run pipeline successfully')
else:
- print(f'{issue_object_rg.number} run pipeline fail')
+ logging.info(f'{issue_object_rg.number} run pipeline fail')
reply_owner(reply_content)
- add_label('auto-ask-check', labels)
+ issue_object_rg.add_to_labels('auto-ask-check')
else:
- print('issue {} need config readme***********'.format(issue_object_rg.number))
+ logging.info('issue {} need config readme'.format(issue_object_rg.number))
diff --git a/scripts/release_issue_status/update_issue_body.py b/scripts/release_issue_status/update_issue_body.py
deleted file mode 100644
index b96a25daec92..000000000000
--- a/scripts/release_issue_status/update_issue_body.py
+++ /dev/null
@@ -1,105 +0,0 @@
-import re
-
-
-def update_issue_body(sdk_repo, rest_repo, issue_number):
- # Get Issue Number
- issue_info = sdk_repo.get_issue(number=issue_number)
- issue_body = issue_info.body
- issue_body_list = [i for i in issue_body.split("\n") if i]
- # Get the link and readme tag in issue body
- link, readme_tag = '', ''
- for row in issue_body_list:
- if 'link' in row.lower():
- link = row.split(":", 1)[-1].strip()
- if 'readme tag' in row.lower():
- readme_tag = row.split(":", 1)[-1].strip()
- if link and readme_tag:
- break
-
- if link.count('https') > 1:
- link = link.split(']')[0]
- link = link.replace('[', "").replace(']', "").replace('(', "").replace(')', "")
-
- package_name, readme_link, output_folder = get_pkname_and_readme_link(rest_repo, link)
-
- # Check readme tag format
- if 'package' not in readme_tag:
- readme_tag = 'package-{}'.format(readme_tag)
- issue_body_list.insert(0, f'Readme Tag: {readme_tag}')
-
- issue_body_list.insert(0, f'\n{readme_link.replace("/readme.md", "")}')
- issue_body_list.insert(1, package_name)
- issue_body_up = ''
- for raw in issue_body_list:
- if raw == '---\r' or raw == '---':
- issue_body_up += '\n'
- issue_body_up += raw + '\n'
-
- issue_info.edit(body=issue_body_up)
- return package_name, readme_link, output_folder
-
-
-def get_pkname_and_readme_link(rest_repo, link):
- # change commit link to pull json link(i.e. https://github.com/Azure/azure-rest-api-specs/commit/77f5d3b5d2fbae17621ea124485788f496786758#diff-708c2fb843b022cac4af8c6f996a527440c1e0d328abb81f54670747bf14ab1a)
- pk_name = ''
- if 'commit' in link:
- commit_sha = link.split('commit/')[-1]
- commit = rest_repo.get_commit(commit_sha)
- link = commit.files[0].blob_url
- link = re.sub('blob/(.*?)/specification', 'blob/main/specification', link)
-
- # if link is a pr, it can get both pakeage name and readme link.
- if 'pull' in link:
- pr_number = int(link.replace("https://github.com/Azure/azure-rest-api-specs/pull/", "").strip('/'))
-
- # Get Readme link
- pr_info = rest_repo.get_pull(number=pr_number)
- pk_url_name = set()
- for pr_changed_file in pr_info.get_files():
- contents_url = pr_changed_file.contents_url
- if '/resource-manager' in contents_url:
- try:
- pk_url_name.add(re.findall(r'/specification/(.*?)/resource-manager/', contents_url)[0])
- except Exception as e:
- continue
- if len(pk_url_name) > 1:
- print("\nexists multiple package names: {}, {} \n".format(pk_url_name, pk_url_name1))
- raise Exception('Not find readme link, because it exists multiple package names')
-
- readme_link = 'https://github.com/Azure/azure-rest-api-specs/blob/main/specification/{}/' \
- 'resource-manager/readme.python.md'.format(list(pk_url_name)[0])
-
-
- # if link is a rest url(i.e. https://github.com/Azure/azure-rest-api-specs/blob/main/specification/xxx/resource-manager/readme.python.md)
- elif '/resource-manager' not in link:
- # (i.e. https://github.com/Azure/azure-rest-api-specs/tree/main/specification/xxxx)
- readme_link = link + '/resource-manager/readme.python.md'
- else:
- readme_link = link.split('/resource-manager')[0] + '/resource-manager/readme.python.md'
- # get the package name by readme link
- readme_link_part = '/specification' + readme_link.split('/specification')[-1]
- readme_contents = str(rest_repo.get_contents(readme_link_part).decoded_content)
- pk_name = re.findall(r'package-name: (.*?)\\n', readme_contents)[0]
- out_folder = re.findall(r'\$\(python-sdks-folder\)/(.*?)/azure-', readme_contents)[0]
- readme_link = readme_link.replace('python.', '')
-
- return pk_name, readme_link, out_folder
-
-
-def find_readme_and_output_folder(sdk_repo, rest_repo, issue_number):
- # Get Issue Number
- issue_info = sdk_repo.get_issue(number=issue_number)
- issue_body = issue_info.body
- issue_body_list = issue_body.split("\n")
- for row in issue_body_list:
- if 'resource-manager' in row:
- readme_link = '{}/readme.md'.format(row.strip("\r"))
- # Get output folder from readme.python.md
- readme_python_link = readme_link.split('/resource-manager')[0] + '/resource-manager/readme.python.md'
- readme_python_link_part = '/specification' + readme_python_link.split('/specification')[-1]
- readme_contents = str(rest_repo.get_contents(readme_python_link_part).decoded_content)
- output_folder = re.findall(r'\$\(python-sdks-folder\)/(.*?)/azure-', readme_contents)[0]
-
- return readme_link, output_folder
- raise Exception('Not find readme link,please check')
-
diff --git a/scripts/release_issue_status/utils.py b/scripts/release_issue_status/utils.py
new file mode 100644
index 000000000000..f96d4a973017
--- /dev/null
+++ b/scripts/release_issue_status/utils.py
@@ -0,0 +1,213 @@
+import datetime
+import json
+import os
+import re
+import logging
+
+from azure.devops.v6_0.pipelines.pipelines_client import PipelinesClient
+from azure.devops.v6_0.pipelines import models
+from bs4 import BeautifulSoup
+from msrest.authentication import BasicAuthentication
+import requests
+
+logging.basicConfig(level=logging.INFO,
+ format='[auto-reply log] - %(funcName)s[line:%(lineno)d] - %(levelname)s: %(message)s')
+
+# Add readme link and package name to the user's issue
+def update_issue_body(sdk_repo, rest_repo, issue_number):
+ # Get Issue Number
+ issue_info = sdk_repo.get_issue(number=issue_number)
+ issue_body = issue_info.body
+ issue_body_list = [i for i in issue_body.split("\n") if i]
+ # Get the link and readme tag in issue body
+ link, readme_tag = '', ''
+ for row in issue_body_list:
+ if 'link' in row.lower():
+ link = row.split(":", 1)[-1].strip()
+ if 'readme tag' in row.lower():
+ readme_tag = row.split(":", 1)[-1].strip()
+ if link and readme_tag:
+ break
+
+ if link.count('https') > 1:
+ link = link.split(']')[0]
+ link = link.replace('[', "").replace(']', "").replace('(', "").replace(')', "")
+
+ package_name, readme_link, output_folder = _get_pkname_and_readme_link(rest_repo, link)
+ # Check readme tag format
+ if 'package' not in readme_tag:
+ readme_tag = 'package-{}'.format(readme_tag)
+ issue_body_list.insert(0, f'Readme Tag: {readme_tag}')
+
+ issue_body_list.insert(0, f'\n{readme_link.replace("/readme.md", "")}')
+ issue_body_list.insert(1, package_name)
+ issue_body_up = ''
+ for raw in issue_body_list:
+ if raw == '---\r' or raw == '---':
+ issue_body_up += '\n'
+ issue_body_up += raw + '\n'
+
+ issue_info.edit(body=issue_body_up)
+ return package_name, readme_link, output_folder
+
+
+def _get_pkname_and_readme_link(rest_repo, link):
+ # change commit link to pull json link(i.e. https://github.com/Azure/azure-rest-api-specs/commit/77f5d3b5d2fbae17621ea124485788f496786758#diff-708c2fb843b022cac4af8c6f996a527440c1e0d328abb81f54670747bf14ab1a)
+ pk_name = ''
+ if 'commit' in link:
+ commit_sha = link.split('commit/')[-1]
+ commit = rest_repo.get_commit(commit_sha)
+ link = commit.files[0].blob_url
+ link = re.sub('blob/(.*?)/specification', 'blob/main/specification', link)
+
+    # if link is a PR, it can get both package name and readme link.
+ if 'pull' in link:
+ pr_number = int(link.replace("https://github.com/Azure/azure-rest-api-specs/pull/", "").strip('/'))
+
+ # Get Readme link
+ pr_info = rest_repo.get_pull(number=pr_number)
+ pk_url_name = set()
+ for pr_changed_file in pr_info.get_files():
+ contents_url = pr_changed_file.contents_url
+ if '/resource-manager' in contents_url:
+ try:
+ pk_url_name.add(re.findall(r'/specification/(.*?)/resource-manager/', contents_url)[0])
+ except Exception as e:
+ continue
+ if len(pk_url_name) > 1:
+ logging.info("\nexists multiple package names: {} \n".format(pk_url_name))
+ raise Exception('Not find readme link, because it exists multiple package names')
+
+ readme_link = 'https://github.com/Azure/azure-rest-api-specs/blob/main/specification/{}/' \
+ 'resource-manager/readme.python.md'.format(list(pk_url_name)[0])
+ # if link is a rest url(i.e. https://github.com/Azure/azure-rest-api-specs/blob/main/specification/xxx/resource-manager/readme.python.md)
+ elif '/resource-manager' not in link:
+ # (i.e. https://github.com/Azure/azure-rest-api-specs/tree/main/specification/xxxx)
+ readme_link = link + '/resource-manager/readme.python.md'
+ else:
+ readme_link = link.split('/resource-manager')[0] + '/resource-manager/readme.python.md'
+ # get the package name by readme link
+ pk_name, out_folder = _find_package_name_and_output(rest_repo, readme_link)
+ readme_link = readme_link.replace('python.', '')
+ return pk_name, readme_link, out_folder
+
+
+# Get readme link and output folder in user issue
+def get_readme_and_output_folder(sdk_repo, rest_repo, issue_number):
+ # Get Issue Number
+ issue_info = sdk_repo.get_issue(number=issue_number)
+ issue_body = issue_info.body
+ issue_body_list = issue_body.split("\n")
+ for row in issue_body_list:
+ if 'resource-manager' in row:
+ readme_link = '{}/readme.md'.format(row.strip("\r"))
+ # Get output folder from readme.python.md
+ readme_python_link = readme_link.split('/resource-manager')[0] + '/resource-manager/readme.python.md'
+ _, output_folder = _find_package_name_and_output(rest_repo, readme_python_link)
+ return readme_link, output_folder
+ raise Exception('Not find readme link,please check')
+
+
+# Find package name and output folder from readme link
+def _find_package_name_and_output(rest_repo, readme_link):
+ readme_link_part = '/specification' + readme_link.split('/specification')[-1]
+ readme_contents = str(rest_repo.get_contents(readme_link_part).decoded_content)
+ pk_name = re.findall(r'package-name: (.*?)\\n', readme_contents)[0]
+ out_folder = re.findall(r'\$\(python-sdks-folder\)/(.*?)/azure-', readme_contents)[0]
+ return pk_name, out_folder
+
+
+# get python pipeline name and definitionId from web
+def get_python_pipelines():
+ python_piplines = {}
+ pipeline_client = PipelinesClient(base_url='https://dev.azure.com/azure-sdk',
+                                      creds=BasicAuthentication('', os.getenv('PIPELINE_TOKEN')))
+ pipelines = pipeline_client.list_pipelines(project='internal')
+ for pipeline in pipelines:
+ if re.findall('^python - \w*$', pipeline.name):
+ key = pipeline.name.replace('python - ', '')
+ python_piplines[key] = pipeline.id
+ return python_piplines
+
+# get the pipeline url through definitionid
+def get_pipeline_url(python_piplines, output_folder):
+ definitionId = python_piplines.get(output_folder)
+ if definitionId:
+ pipeline_url = 'https://dev.azure.com/azure-sdk/internal/_build?definitionId={}'.format(definitionId)
+ else:
+ logging.info('Cannot find definitionId, Do not display pipeline_url')
+ pipeline_url = ''
+ return pipeline_url
+
+
+# Run sdk-auto-release(main) to generate SDK
+def run_pipeline(issue_link, sdk_issue_object, pipeline_url):
+ paramaters = {
+ "stages_to_skip": [],
+ "resources": {
+ "repositories": {
+ "self": {
+ "refName": "refs/heads/main"
+ }
+ }
+ },
+ "variables": {
+ "BASE_BRANCH": {
+ "value": f"{sdk_issue_object.head.label}",
+ "isSecret": False
+ },
+ "ISSUE_LINK": {
+ "value": f"{issue_link}",
+ "isSecret": False
+ },
+ "PIPELINE_LINK": {
+ "value": f"{pipeline_url}",
+ "isSecret": False
+ }
+ }
+ }
+ # Fill in with your personal access token and org URL
+ personal_access_token = os.getenv('PIPELINE_TOKEN')
+ organization_url = 'https://dev.azure.com/azure-sdk'
+
+ # Create a connection to the org
+ credentials = BasicAuthentication('', personal_access_token)
+ run_parameters = models.RunPipelineParameters(**paramaters)
+ client = PipelinesClient(base_url=organization_url, creds=credentials)
+ result = client.run_pipeline(project='internal',pipeline_id=2500,run_parameters=run_parameters)
+ if result.state == 'inProgress':
+ return True
+ else:
+ return False
+
+
+# If the new version has been published successfully, reply to the user with the PyPI link and close the issue.
+def auto_close_issue(sdk_repo, item):
+ issue_number, package_name = item.issue_object.number, item.package
+ issue_info = sdk_repo.get_issue(number=issue_number)
+ issue_author = issue_info.user.login
+ last_comment = list(issue_info.get_comments())[-1]
+ last_comment_date = last_comment.created_at
+ last_version, last_time = _get_last_released_date(package_name)
+ if last_time and last_time > last_comment_date:
+ comment = f'Hi @{issue_author}, pypi link: https://pypi.org/project/{package_name}/{last_version}/'
+ issue_info.create_comment(body=comment)
+ issue_info.edit(state='closed')
+ item.issue_object.add_to_labels('auto-closed')
+ logging.info(f"issue number:{issue_number} has been closed!")
+
+
+def _get_last_released_date(package_name):
+ pypi_link = f'https://pypi.org/project/{package_name}/#history'
+ res = requests.get(pypi_link)
+ soup = BeautifulSoup(res.text, 'html.parser')
+    # find the latest release entry in the PyPI release-timeline section
+ try:
+ package_info = soup.select('div[class="release-timeline"]')[0].find_all('div')[0]
+ last_version_mix = package_info.find_all('p', class_="release__version")[0].contents[0]
+ except IndexError as e:
+ return '', ''
+ last_version = last_version_mix.replace(' ', '').replace('\n', '')
+ last_version_date_str = package_info.time.attrs['datetime'].split('+')[0]
+ last_version_date = datetime.datetime.strptime(last_version_date_str, '%Y-%m-%dT%H:%M:%S')
+ return last_version, last_version_date