diff --git a/scripts/release_issue_status/auto_close.py b/scripts/release_issue_status/auto_close.py
deleted file mode 100644
index ca439398aa59..000000000000
--- a/scripts/release_issue_status/auto_close.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import datetime
-
-import requests
-from bs4 import BeautifulSoup
-
-
-def auto_close_issue(sdk_repo, item):
-    issue_number, package_name = item.issue_object.number, item.package
-    issue_info = sdk_repo.get_issue(number=issue_number)
-    issue_author = issue_info.user.login
-    last_comment = list(issue_info.get_comments())[-1]
-    last_comment_date = last_comment.created_at
-    last_version, last_time = get_last_released_date(package_name)
-    if last_time and last_time > last_comment_date:
-        comment = f'Hi @{issue_author}, pypi link: https://pypi.org/project/{package_name}/{last_version}/'
-        issue_info.create_comment(body=comment)
-        issue_info.edit(state='closed')
-        item.labels.append('auto-closed')
-        item.issue_object.set_labels(*item.labels)
-        print(f"issue number:{issue_number} has been closed!")
-
-
-def get_last_released_date(package_name):
-    pypi_link = f'https://pypi.org/project/{package_name}/#history'
-    res = requests.get(pypi_link)
-    soup = BeautifulSoup(res.text, 'html.parser')
-    # find top div from <div class="release-timeline">
-    try:
-        package_info = soup.select('div[class="release-timeline"]')[0].find_all('div')[0]
-        last_version_mix = package_info.find_all('p', class_="release__version")[0].contents[0]
-    except IndexError as e:
-        return '', ''
-    last_version = last_version_mix.replace(' ', '').replace('\n', '')
-    last_version_date_str = package_info.time.attrs['datetime'].split('+')[0]
-    last_version_date = datetime.datetime.strptime(last_version_date_str, '%Y-%m-%dT%H:%M:%S')
-    return last_version, last_version_date
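The removed helper scraped PyPI's `#history` page with BeautifulSoup, which breaks whenever the page markup changes. For reference, a minimal sketch of the same lookup against PyPI's JSON endpoint (assuming the standard `https://pypi.org/pypi/<package>/json` layout; error handling and return shape mirror the removed function):

```python
# Sketch: latest release + upload time via PyPI's JSON API, no HTML parsing.
import datetime

import requests


def get_last_released_date(package_name):
    res = requests.get(f'https://pypi.org/pypi/{package_name}/json')
    if res.status_code != 200:
        return '', ''
    info = res.json()
    last_version = info['info']['version']
    # upload_time is ISO-like, e.g. '2021-07-01T08:30:00'
    files = info['releases'].get(last_version, [])
    if not files:
        return '', ''
    last_version_date = datetime.datetime.strptime(
        files[0]['upload_time'], '%Y-%m-%dT%H:%M:%S')
    return last_version, last_version_date
```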
diff --git a/scripts/release_issue_status/auto_pipeline_run.py b/scripts/release_issue_status/auto_pipeline_run.py
deleted file mode 100644
index ef35ee2612fe..000000000000
--- a/scripts/release_issue_status/auto_pipeline_run.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import json
-import os
-import re
-from msrest.authentication import BasicAuthentication
-from azure.devops.v6_0.pipelines.pipelines_client import PipelinesClient
-from azure.devops.v6_0.pipelines import models
-import requests
-
-
-def run_pipeline(issue_link, sdk_issue_object, pipeline_url):
-    paramaters = {
-        "stages_to_skip": [],
-        "resources": {
-            "repositories": {
-                "self": {
-                    "refName": "refs/heads/main"
-                }
-            }
-        },
-        "variables": {
-            "BASE_BRANCH": {
-                "value": f"{sdk_issue_object.head.label}",
-                "isSecret": False
-            },
-            "ISSUE_LINK": {
-                "value": f"{issue_link}",
-                "isSecret": False
-            },
-            "PIPELINE_LINK": {
-                "value": f"{pipeline_url}",
-                "isSecret": False
-            }
-        }
-    }
-    # Fill in with your personal access token and org URL
-    personal_access_token = os.getenv('PIPELINE_TOKEN')
-    organization_url = 'https://dev.azure.com/azure-sdk'
-
-    # Create a connection to the org
-    credentials = BasicAuthentication('', personal_access_token)
-    run_parameters = models.RunPipelineParameters(**paramaters)
-    client = PipelinesClient(base_url=organization_url, creds=credentials)
-    result = client.run_pipeline(project='internal',pipeline_id=2500,run_parameters=run_parameters)
-    if result.state == 'inProgress':
-        return True
-    else:
-        return False
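The removed `run_pipeline` reduces to three steps: authenticate with a PAT, build `RunPipelineParameters`, and treat an `inProgress` run state as success. A condensed sketch of that flow (`trigger_pipeline` and the flat `variables` dict are illustrative; the org URL, project, and pipeline id 2500 are the hard-coded values from the removed code):

```python
# Sketch: trigger an Azure DevOps pipeline run with arbitrary variables.
import os

from msrest.authentication import BasicAuthentication
from azure.devops.v6_0.pipelines import models
from azure.devops.v6_0.pipelines.pipelines_client import PipelinesClient


def trigger_pipeline(variables, pipeline_id=2500):
    client = PipelinesClient(
        base_url='https://dev.azure.com/azure-sdk',
        creds=BasicAuthentication('', os.getenv('PIPELINE_TOKEN')))
    run_parameters = models.RunPipelineParameters(
        variables={k: {'value': v, 'isSecret': False} for k, v in variables.items()})
    result = client.run_pipeline(project='internal',
                                 pipeline_id=pipeline_id,
                                 run_parameters=run_parameters)
    # a freshly queued run reports state 'inProgress'
    return result.state == 'inProgress'
```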
diff --git a/scripts/release_issue_status/get_python_pipeline.py b/scripts/release_issue_status/get_python_pipeline.py
deleted file mode 100644
index 5b0939a704d2..000000000000
--- a/scripts/release_issue_status/get_python_pipeline.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import os
-import re
-
-from msrest.authentication import BasicAuthentication
-from azure.devops.v6_0.pipelines.pipelines_client import PipelinesClient
-
-
-def get_python_pipelines():
-    python_piplines = {}
-    pipeline_client = PipelinesClient(base_url='https://dev.azure.com/azure-sdk',
-                                      creds=BasicAuthentication('', os.getenv('PIPELINE_TOKEN')))
-    pipelines = pipeline_client.list_pipelines(project='internal')
-    for pipeline in pipelines:
-        if re.findall('^python - \w*$', pipeline.name):
-            key = pipeline.name.replace('python - ', '')
-            python_piplines[key] = pipeline.id
-    return python_piplines
-
-
-def get_pipeline_url(python_piplines, output_folder):
-    definitionId = python_piplines.get(output_folder)
-    if definitionId:
-        pipeline_url = 'https://dev.azure.com/azure-sdk/internal/_build?definitionId={}'.format(definitionId)
-    else:
-        print('Cannot find definitionId, Do not display pipeline_url')
-        pipeline_url = ''
-    return pipeline_url
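These two helpers (now consolidated in `utils.py`) key each definition by the service name embedded in a `python - <service>` pipeline name, then turn that id into a build URL. A self-contained sketch of the mapping, with a hypothetical `build_python_pipelines` name and dummy ids standing in for the live `list_pipelines` result:

```python
# Sketch: 'python - <service>' pipeline names -> {service: definition id}.
import re


def build_python_pipelines(pipeline_names_to_ids):
    python_pipelines = {}
    for name, definition_id in pipeline_names_to_ids.items():
        if re.fullmatch(r'python - \w*', name):
            python_pipelines[name.replace('python - ', '')] = definition_id
    return python_pipelines


pipelines = build_python_pipelines({'python - network': 1234, 'java - network': 99})
assert pipelines == {'network': 1234}
url = 'https://dev.azure.com/azure-sdk/internal/_build?definitionId={}'.format(
    pipelines['network'])
```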
diff --git a/scripts/release_issue_status/main.py b/scripts/release_issue_status/main.py
index 64c5d1ac4bd5..6bc1289cfc0a 100644
--- a/scripts/release_issue_status/main.py
+++ b/scripts/release_issue_status/main.py
@@ -4,20 +4,22 @@
 from datetime import date, datetime
 import subprocess as sp
 import traceback
+import logging
 
 from github import Github
 from azure.storage.blob import BlobClient
 
 import reply_generator as rg
-from update_issue_body import update_issue_body, find_readme_and_output_folder
-from auto_close import auto_close_issue
-from get_python_pipeline import get_python_pipelines, get_pipeline_url
+from utils import update_issue_body, get_readme_and_output_folder, \
+    get_python_pipelines, get_pipeline_url, auto_close_issue
 
 _NULL = ' '
 _FILE_OUT = 'release_issue_status.csv'
 _FILE_OUT_PYTHON = 'release_python_status.md'
 _PYTHON_SDK_ADMINISTRATORS = {'msyyc', 'RAY-316', 'BigCat20196'}
 
+logging.basicConfig(level=logging.INFO,
+                    format='[auto-reply log] - %(funcName)s[line:%(lineno)d] - %(levelname)s: %(message)s')
 
 def my_print(cmd):
     print('==' + cmd + ' ==\n')
@@ -127,46 +129,42 @@ def _latest_comment_time(comments, delay_from_create_date):
 
 
 def auto_reply(item, request_repo, rest_repo, sdk_repo, duplicated_issue, python_piplines):
-    print("==========new issue number: {}".format(item.issue_object.number))
-    if 'Configured' in item.labels:
-        item.labels.remove('Configured')
+    logging.info("new issue number: {}".format(item.issue_object.number))
     if 'auto-link' not in item.labels:
-        item.labels.append('auto-link')
-        item.issue_object.set_labels(*item.labels)
+        item.issue_object.add_to_labels('auto-link')
     try:
         package_name, readme_link, output_folder = update_issue_body(request_repo, rest_repo, item.issue_object.number)
-        print("pkname, readme", package_name, readme_link)
+        logging.info("pkname: %s, readme: %s", package_name, readme_link)
         item.package = package_name
         key = ('Python', item.package)
         duplicated_issue[key] = duplicated_issue.get(key, 0) + 1
     except Exception as e:
         item.bot_advice = 'failed to modify the body of the new issue. Please modify manually'
-        item.labels.append('attention')
-        item.issue_object.set_labels(*item.labels)
-        print(e)
+        item.issue_object.add_to_labels('attention')
+        logging.info(e)
         raise
     else:
         try:
-            readme_link, output_folder = find_readme_and_output_folder(request_repo, rest_repo, item.issue_object.number)
+            readme_link, output_folder = get_readme_and_output_folder(request_repo, rest_repo, item.issue_object.number)
         except Exception as e:
-            print('Issue: {} updates body failed'.format(item.issue_object.number))
-            item.bot_advice = 'failed to find Readme link, Please check !!'
-            item.labels.append('attention')
-            item.issue_object.set_labels(*item.labels)
+            logging.info('Issue: {} get pkname and output folder failed'.format(item.issue_object.number))
+            item.bot_advice = 'failed to find Readme link and output folder. Please check !!'
+            item.issue_object.add_to_labels('attention')
+            logging.info(e)
             raise
     try:
-        print("*********************")
-        print(python_piplines)
+        logging.info(python_piplines)
        pipeline_url = get_pipeline_url(python_piplines, output_folder)
         rg.begin_reply_generate(item=item, rest_repo=rest_repo, readme_link=readme_link, sdk_repo=sdk_repo,
                                 pipeline_url=pipeline_url)
+        if 'Configured' in item.labels:
+            item.issue_object.remove_from_labels('Configured')
     except Exception as e:
         item.bot_advice = 'auto reply failed, Please intervene manually !!'
-        print('Error from auto reply ========================')
-        print('Issue:{}'.format(item.issue_object.number))
-        print(traceback.format_exc())
-        print('==============================================')
+        logging.info('Error from auto reply')
+        logging.info('Issue:{}'.format(item.issue_object.number))
+        logging.info(traceback.format_exc())
 
 
 def main():
@@ -220,10 +218,19 @@ def main():
     # rule6: if delay from created date is over 30 days and owner never reply, close it.
     # rule7: if delay from created date is over 15 days and owner never reply, remind owner to handle it.
     for item in issue_status:
+        if item.language == 'Python':
+            issue_status_python.append(item)
         if item.status == 'release':
             item.bot_advice = 'better to release asap.'
         elif (item.comment_num == 0 or 'Configured' in item.labels) and 'Python' in item.labels:
             item.bot_advice = 'new issue and better to confirm quickly.'
+            if 'assigned' not in item.labels:
+                time.sleep(0.1)
+                assign_count = int(str(time.time())[-1]) % 2
+                if assign_count == 1:
+                    item.issue_object.remove_from_assignees(*['RAY-316'])
+                    item.issue_object.add_to_assignees(*['BigCat20196'])
+                item.issue_object.add_to_labels('assigned')
         try:
             auto_reply(item, request_repo, rest_repo, sdk_repo, duplicated_issue, python_piplines)
         except Exception as e:
@@ -237,25 +244,20 @@
                 auto_close_issue(request_repo, item)
             except Exception as e:
                 item.bot_advice = 'auto-close failed, please check!'
-                print(f"=====issue: {item.issue_object.number}, {e}")
+                logging.info(f"=====issue: {item.issue_object.number}, {e}")
 
         if item.days_from_latest_commit >= 30 and item.language == 'Python' and '30days attention' not in item.labels:
-            item.labels.append('30days attention')
-            item.issue_object.set_labels(*item.labels)
+            item.issue_object.add_to_labels('30days attention')
             item.issue_object.create_comment(f'hi @{item.author}, the issue is closed since there is no reply for a long time. Please reopen it if necessary or create new one.')
             item.issue_object.edit(state='close')
         elif item.days_from_latest_commit >= 15 and item.language == 'Python' and '15days attention' not in item.labels:
             item.issue_object.create_comment(f'hi @{item.author}, this release-request has been delayed more than 15 days,'
                                              ' please deal with it ASAP. We will close the issue if there is still no response after 15 days!')
-            item.labels.append('15days attention')
-            item.issue_object.set_labels(*item.labels)
+            item.issue_object.add_to_labels('15days attention')
 
         # judge whether there is duplicated issue for same package
         if item.package != _NULL and duplicated_issue.get((item.language, item.package)) > 1:
             item.bot_advice = f'Warning:There is duplicated issue for {item.package}. ' + item.bot_advice
-
-        if item.language == 'Python':
-            issue_status_python.append(item)
 
     # output result
     output_python_md(issue_status_python)
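A note on the new assignment branch in `main()`: it picks between the two default assignees using the parity of the last digit of `time.time()` (the preceding `time.sleep(0.1)` keeps consecutive issues from observing the same clock digit). A sketch of the trick next to a clearer equivalent, assuming an even split is the only goal:

```python
# Sketch: the timestamp-parity coin flip used above, and a plainer alternative.
import random
import time

candidates = ['RAY-316', 'BigCat20196']

# parity trick from main.py: ~50/50 depending on the clock's last digit
assignee = candidates[int(str(time.time())[-1]) % 2]

# equivalent intent, stated directly
assignee = random.choice(candidates)
```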
diff --git a/scripts/release_issue_status/release_issue_status.yml b/scripts/release_issue_status/release_issue_status.yml
index 0e3146fdee9e..410e1952c99f 100644
--- a/scripts/release_issue_status/release_issue_status.yml
+++ b/scripts/release_issue_status/release_issue_status.yml
@@ -38,7 +38,8 @@ jobs:
         export TOKEN=$(USR_TOKEN)
         export HEADERS=$(PIPELINE_HEADERS)
         export URL=$(PIPELINE_URL)
-        export PIPELINE_TOKEN = $(PIPELINE_TOKEN)
+        export PIPELINE_TOKEN=$(USR_PIPELINE_TOKEN)
+        export COOKIE=$(USR_TOKEN)
 
         # create virtual env
         python -m venv venv-sdk
diff --git a/scripts/release_issue_status/reply_generator.py b/scripts/release_issue_status/reply_generator.py
index eddf8a9fee02..ad586e04fac7 100644
--- a/scripts/release_issue_status/reply_generator.py
+++ b/scripts/release_issue_status/reply_generator.py
@@ -1,10 +1,13 @@
-import auto_pipeline_run as apr
+from utils import run_pipeline
 import re
+import logging
 
 issue_object_rg = None
 
+logging.basicConfig(level=logging.INFO,
+                    format='[auto-reply log] - %(funcName)s[line:%(lineno)d] - %(levelname)s: %(message)s')
 
-def weather_change_readme(rest_repo, link_dict, labels):
+def readme_comparison(rest_repo, link_dict, labels):
     # to see whether need change readme
     contents = str(rest_repo.get_contents(link_dict['readme_path']).decoded_content)
     pattern_tag = re.compile(r'tag: package-[\w+-.]+')
@@ -13,6 +16,8 @@ def weather_change_readme(rest_repo, link_dict, labels):
     whether_multi_api = 'multi-api' in readme_python_contents
     whether_same_tag = link_dict['readme_tag'] in package_tag
     whether_change_readme = not whether_same_tag or whether_multi_api and not 'MultiAPI' in labels
+    if 'Configured' in labels:
+        whether_change_readme = False
 
     return whether_change_readme
 
@@ -47,55 +52,26 @@ def get_latest_pr_from_readme(rest_repo, link_dict):
     return latest_pr_number_int[-1]
 
 
-def latest_pr_parse(rest_repo, latest_pr_number):
-    latest_pr = rest_repo.get_issue(latest_pr_number)
-    latest_pr_comments = latest_pr.get_comments()
-    b = [i for i in latest_pr_comments]
-    for comment in latest_pr_comments:
-        if 'Swagger Generation Artifacts' in comment.body:
-            return swagger_generator_parse(comment.body, latest_pr_number)
-
-
-def swagger_generator_parse(context, latest_pr_number):
-    track1_info_model = ''
-    try:
-        if ' azure-sdk-for-python' in context:
-            pattern_python_t1 = re.compile(' azure-sdk-for-python.+?', re.DOTALL)
-            python_t1 = re.search(pattern_python_t1, context).group()
-            prttern_python_track1 = re.compile('
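(The match strings in the deleted `latest_pr_parse`/`swagger_generator_parse` originally contained HTML markup around 'Swagger Generation Artifacts' and ' azure-sdk-for-python' that was stripped in this copy of the patch, and the last hunk is truncated.) Earlier in this file, `readme_comparison` decides whether the readme needs changes by comparing `tag: package-*` entries. A runnable sketch of that tag check against a dummy readme body, assuming the elided lines collect tags with `findall`:

```python
# Sketch: the tag comparison readme_comparison performs, on inline dummy data
# instead of contents fetched live via rest_repo.get_contents().
import re

readme_contents = '''
readme.python.md:
tag: package-2021-05
'''

pattern_tag = re.compile(r'tag: package-[\w+-.]+')
package_tag = pattern_tag.findall(readme_contents)  # assumption: findall, as in hunk context
readme_tag = 'tag: package-2021-05'                 # stand-in for link_dict['readme_tag']
whether_same_tag = readme_tag in package_tag
assert whether_same_tag
```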