diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..f1e3383 --- /dev/null +++ b/.gitignore @@ -0,0 +1,137 @@ +# .gitignore itself +# .gitignore + +# Visual Studio Code +.vscode + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-shm +db.sqlite3-wal +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/__init__.py b/Database/.gitkeep similarity index 100% rename from __init__.py rename to Database/.gitkeep diff --git a/PkgBot.py b/PkgBot.py new file mode 100755 index 0000000..ad3e739 --- /dev/null +++ b/PkgBot.py @@ -0,0 +1,115 @@ +#!/usr/local/autopkg/python + +import multiprocessing +import sys + +# import asyncio +import secure +import uvicorn + +from fastapi import FastAPI +from fastapi.responses import RedirectResponse +from fastapi.staticfiles import StaticFiles +from fastapi.middleware.httpsredirect import HTTPSRedirectMiddleware + +from tortoise.contrib.fastapi import register_tortoise + +from pkgbot import config, settings + +config = config.load_config(cli_args=tuple(sys.argv[1:])) + +from pkgbot.utilities import common as utility +from pkgbot.db import models +from pkgbot import api + + +log = utility.log + +app = FastAPI( + title="PkgBot API", + description="A framework to manage software packaging, testing, and promoting from a " + "development to production environment.", + version="0.2.0", + openapi_tags=settings.api.tags_metadata, + docs_url="/api" +) + +app.mount("/static", StaticFiles(directory="/Library/AutoPkg/PkgBot/pkgbot/static"), name="static") +app.include_router(api.views.router) +app.include_router(api.auth.router) +app.include_router(api.autopkg.router) +app.include_router(api.package.router) +app.include_router(api.recipe.router) +app.include_router(api.bot.router) +app.include_router(api.build_msg.router) +app.include_router(api.send_msg.router) +app.include_router(api.user.router) + +register_tortoise( 
+ app, + config = settings.db.TORTOISE_CONFIG, + generate_schemas = True, + add_exception_handlers = True +) + +# Add an exception handler to the app instance +# Used for the login/auth logic for the HTTP views +app.add_exception_handler(api.auth.NotAuthenticatedException, api.auth.exc_handler) +api.auth.login_manager.useRequest(app) + +if config.PkgBot.get("enable_ssl"): + + # Enforces that all incoming requests must be https. + app.add_middleware(HTTPSRedirectMiddleware) + server = secure.Server().set("Secure") + hsts = secure.StrictTransportSecurity().include_subdomains().preload().max_age(2592000) + cache_value = secure.CacheControl().must_revalidate() + secure_headers = secure.Secure( + server=server, + # csp=csp, + hsts=hsts, + # referrer=referrer, + # permissions=permissions_value, + cache=cache_value, + ) + + @app.middleware("http") + async def set_secure_headers(request, call_next): + response = await call_next(request) + secure_headers.framework.fastapi(response) + return response + + +async def number_of_workers(): + number_of_threads = (multiprocessing.cpu_count() * 2) - 1 + log.debug(f"Number of workers: {number_of_threads}") + return number_of_threads + + +@app.on_event("startup") +async def startup_event(): + + pkgbot_admins = config.PkgBot.get("Admins") + + for admin in pkgbot_admins: + user_object = models.PkgBotAdmin_In( + username = admin, + slack_id = pkgbot_admins.get(admin), + full_admin = True + ) + await api.user.create_or_update_user(user_object) + + +if __name__ == "__main__": + + uvicorn.run( + "PkgBot:app", + reload = config.PkgBot.get("keep_alive"), + host = config.PkgBot.get("host"), + port = config.PkgBot.get("port"), + log_config = config.PkgBot.get("log_config"), + log_level = config.PkgBot.get("uvicorn_log_level"), + # workers = asyncio.run(number_of_workers()), + ssl_keyfile = config.PkgBot.get("ssl_keyfile"), + ssl_certfile = config.PkgBot.get("ssl_certfile") + ) diff --git a/README.md b/README.md index 12e387b..dd0229b 100644 
--- a/README.md +++ b/README.md @@ -1,58 +1,57 @@ # PkgBot -PkgBot is a framework to manage software packaging, testing, and promoting from a development to production environment. It utilizes the open source project AutoPkg to download and package software. A Slack Bot is utilized to send notifications and receive commands on what to do with those notifications. +PkgBot is an automation framework for the open source project [AutoPkg](https://www.github.com/autopkg) that provides a web-based front end and a Slack Bot to send notifications and receive commands. It helps manage the lifecycle of software packaging through package and version validation and then provides an interactive method to "promote" a specific package version from "development" (or "test") to production environments. -
+ ## About -PkgBot is currently written to support this workflow utilizing Jamf Pro and this build specifically expects the use of the JSSImporter Processor to upload packages into Jamf Pro. From there, several customizations are were made to `JSSImporter` as will as Post Processors, to provide the functionality that PkgBot offers. This build will be the last to support `JSSImporter` and PkgBot receive a massive overhaul in the near future. +PkgBot provides this workflow utilizing Jamf Pro and the [JamfUploader](https://github.com/grahampugh/jamf-upload) line of Processors. A Slack Bot is used to send new build notifications and allows a `PkgBot Admin` to interact with those notifications. -A Slack Bot is used to send new build notifications and allows a `PkgBot Admin` to interact with those notifications. AutoPkg generates notifications via a custom Post Processor aptly named "PkgBot." This Post Processor communicates with the PkgBot API to send messages to Slack. +To "promote" a package to a production Jamf Pro instance without re-running the entire recipe chain, a custom Post Processor (inspired by Graham Pugh's [JSSRecipeReceiptChecker](https://github.com/autopkg/grahampugh-recipes/blob/master/CommonProcessors/JSSRecipeReceiptChecker.py)) is used to find and acquire the matching recipe dev run details. The values are passed to a "production recipe template" that performs the JamfUploader steps which can be configured to upload the package and optionally, update other various items (e.g. Policy, Group, Scripts, etc.) _without_ re-downloading nor re-packaging. 
-To "promote" a package to a production Jamf Pro instance, a custom Post Processor (forked and heavily customized from Graham Pugh's [JSSRecipeReceiptChecker](https://github.com/autopkg/grahampugh-recipes/blob/master/CommonProcessors/JSSRecipeReceiptChecker.py)) along with a custom "recipe template" is used find the previous autopkg run details, get the required info and upload the package and, optionally, update other various items (e.g. Policy, Group, Scripts, etc.) _without_ re-downloading nor re-packaging. +A web-based front end is available to review the status and history of all packages as well as the _known_ `AutoPkg` recipe configurations and statuses. -A web view is also provided where all package status and history can be reviewed as well as `AutoPkg` recipe configurations and status. +PkgBot has been running in my production environment for over a year now and is working quite well. I've been ironing out the kinks and making improvements to the overall processes and workflows to streamline everything. -PkgBot has been running in my production environment for a little over a year now and working quite well. I've been ironing out some of the kinks and making improvements to the overall process and workflows as I attempt to streamline everything. - -
+ ## Backend Design -This project is built around FastAPI and several other core libraries. I built it with an API workflow similar to that of the Jamf Pro and Classic APIs in-mind. So, if you've worked with these, this project will have a familiar feel. +This project is built around FastAPI, Celery, the Slack SDK, and several other core libraries. I built it with an API similar to the Jamf Pro (UAPI) and Classic APIs. So if you've worked with these, this project's API will have a familiar feel. -The project has a fully async code base and utilizes numerous popular Python libraries. +The project has a fully asynchronous code base and utilizes numerous popular Python libraries. -
+ ## Planned Features - * Moving to a proper "backend" system for executing tasks + * ~~Moving to a proper "backend" system for executing tasks~~ * (More) Streamlining (of) workflows - * "Hosting" the icons within PkgBot instead of Jamf Pro + * ~~"Hosting" the icons within PkgBot instead of Jamf Pro~~ * Slack slash commands for executing recipes * Support for "cleaning up" old notifications * e.g. when an app version has been "retired" * Code Improvements - * Better config loading - * Better log loading/usage + * ~~Better config loading~~ + * ~~Better log loading/usage~~ * A "setup/install" script -
+ ## Requirements -PkgBot will be written to support the Python3 framework that is shipped with `AutoPkg`. It does need numerous additional libraries that will have to be installed separately that are not included with `AutoPkg`'s bundled Python3. +PkgBot will be written to support the Python3 framework that is shipped with `AutoPkg` (currently supporting AutoPkg 2.6.0's bundled Python 3.10). It needs additional libraries that are not included with `AutoPkg`'s bundled Python3 that need to be installed separately. It also requires RabbitMQ. -The major libraries are: +The major Python libraries are: * FastAPI + * Celery * Jinja2 - * For the web views portion + * For the web front-end * Slack SDK * For Notifications * Tortoise ORM @@ -61,137 +60,25 @@ The major libraries are: See the requirements.txt file for additional libraries and dependencies. -## ("_Basic_") How to Setup - -Below will be the basics to get this setup and working. A few more bits and pieces can be done to customize things further. (For example, a LaunchDaemon to run the PkgBot webserver and a LaunchAgent to kick off scheduled runs of autopkg. Samples will be provided in the extras directory.) - -1. Install the prerequisites: - * Git - * AutoPkg - * JSSImporter - -2. Clone this repo and store it on your AutoPkg Runner. - * `git clone https://github.com/mlbz521/PkgBot.git "/Library/AutoPkg/"` - -3. Install requirements in specific location - * e.g. `pip install --target="/Library/AutoPkg/PkgBotPackages" -r requirements.txt` - * Or, if your simply testing, create and active a virtual environment and install the requirements - -4. Create a Slack Bot/App - * There are numerous tutorials on how to do this and I'm not going to go over the entire process here. I will simply provide the configuration requirements. 
[Official documentation](https://slack.com/help/articles/115005265703-Create-a-bot-for-your-workspace) - * Features/Functionality required - * Incoming Webhooks - * Create a webhook to post to the desired channel - * Interactive Components - * Set a `Request URL` that the Bot will send messages too and your server will receive on - * e.g. `https://pkgbot.my.server.org/slackbot/receive` - * or, if using ngrok: `https://84c5df439d74.ngrok.io/slackbot/receive` (see below) - * Bots - * OAuth & Permissions - * Scopes - * Bot Token Scopes - * chat:write - * files:write - * reactions:read - * reactions:write - * incoming-webhook - * Tokens/Secrets/Keys required: - * Bot User OAuth Token - * Signing Secret - * Bot Name - * Channel - * Channel it will be posting into - * Note: You can test PkgBot without creating the SlackBot -- obviously expect for the _actual_ Slack notifications part. - -5. Configure your environments' settings (`/[path/to/PkgBot]/settings/pkgbot_config.yaml`). - -6. Ensure your PkgBot "server" can communicate with Slack's API - * For testing, you can utilize ngrok to allow communication from Slack to your dev box. - * There are numerous tutorials on how to do this and I'm not going to go over the entire process here. I will simply provide the configuration requirements. [Official documentation ](https://ngrok.com/docs/getting-started) - * Follow steps two through four above - * In step four, use the same port defined in your `pkgbot_config.yaml` file - * e.g. `ngrok http 443` - * After starting ngrok, grab the forwarding address from the console - * e.g. `Forwarding https://84c5df439d74.ngrok.io -> http://localhost:443` - * the forwarding address is: `https://84c5df439d74.ngrok.io` - * The forwarding address will need to be entered into your Slack Bot configuration - -7. 
Optionally, create a private/public certificate for use with Uvicorn (_not required when testing with ngrok_) - * Generate a private key and a csr: - * `openssl req -new -newkey rsa:2048 -nodes -keyout private.key -out pkgbot_csr.csr` - * Get a publicly trusted cert with using the CSR - * Update the pkgbot_config with these values - -8. Run PkgBot via: - * `python3 pkgbot.py` - * or `chmod +x pkgbot.py && pkgbot.py` - - -### "_Basic_" Examples - -Example command to run via `autopkg` directly with the PkgBot Post Processor - -`/usr/local/bin/autopkg run com.github.mlbz521.jss.ProductionTemplate --key recipe_id="com.github.mlbz521.jss.Dropbox" --prefs="/Library/AutoPkg/PkgBotServer/settings/prod_autopkg_prefs.plist" --postprocessor PkgBot -vv` - -#### Using the pseudo cli tool - -How to import recipes from a yaml file - -`python3 -m execute.autopkg manage import --input ./settings/recipe_config.yaml` -``` -2021-06-25 15:32:55,781 - PkgBot - INFO - Importing recipe config file from: ./settings/recipe_config.yaml. -2021-06-25 15:33:52,474 - PkgBot - INFO - All recipe configurations have been imported! 
-``` - -Manage a single recipe - -`python3 -m execute.autopkg manage single -i com.github.mlbz521.jss.Zoom-ForIT --enable --force` - -Run a single recipe via PkgBot - -`python3 -m execute.autopkg run -e dev -i com.github.mlbz521.jss.Brother-MFC-J6935DW --pkgbot_config "/Library/AutoPkg/PkgBotServer/settings/pkgbot_config.yaml"` - - -#### Managing the LaunchAgent for Kicking off AutoPkg runs - -Load launchagent to run recipes - -`launchctl bootstrap "gui/501" ~/Library/LaunchAgents/com.github.mlbz521.autopkg.service.plist` - -Kickstart run of recipes - -`launchctl kickstart -p "gui/501/com.github.mlbz521.autopkg.service"` - -Stop - -`launchctl bootout "gui/501/com.github.mlbz521.autopkg.service"` - - -#### Managing the PkgBot LaunchDaemon - -Start PkgBot - -`sudo launchctl bootstrap system /Library/LaunchDaemons/com.github.mlbz521.pkgbot.service.plist` - -Stop +## How to Setup -`sudo launchctl bootout "system/com.github.mlbz521.pkgbot.service"` +The basics to get PkgBot setup and working are covered in the [Wiki](https://github.com/MLBZ521/PkgBot/wiki/%22Basic%22-How-to-Setup). ## The Why(s) and My Thought Process -At my organization, I have moved almost everything that needs to be "packaged" to be _packaged_ by AutoPkg. This was done for numerous reasons: +For my organization, almost every applications that needs to be "packaged," I have AutoPkg _packaging_ it. This was done for numerous reasons: * automation - * the recipe creates documentation of the steps to reproduce a package + * the recipe chain is self documenting of the steps to reproduce a package * e.g. for a new version - * a single "this is the way" [_that we do things_] + * a single "this is the way" (_that we do things_) * easier for team members to pick up and go * scalability In addition, there are a large number of Site Admins in my organization and I'm always receiving questions like: > "When is \ \ going to be available to deploy in Jamf Pro?" 
-There are several AutoPkg Post Processors to send webhooks to Slack to post messages for new packages, but I wanted **_more_**. In addition, we have a dev (i.e. "test") environment that we (attempt to) test everything (packages, configurations, Jamf Pro Suite versions, etc.) in first. If AutoPkg was uploading new versions into the production instance, then Site Admins could use those versions before they had _actually_ been tested. So I wanted to ensure packages could be tested in the dev environment without the risk of pre-deployment as well as automate the workflow as much as possible (from the several manual steps that I was previously performing to move packages from dev to production). +There are several AutoPkg Post Processors to send webhooks that post messages to Slack for new packages, but I wanted **_more_**. In addition, we have a dev (i.e. "test") environment that we (attempt to) test everything (packages, configurations, Jamf Pro Suite versions, etc.) in first. If AutoPkg was uploading new versions into the production instance, then Site Admins could use those versions before they had _actually_ been verified. So I wanted to ensure packages could be safely tested in the dev environment without the risk of pre-deployment as well as automating the workflow as much as possible (from the several manual steps that I was previously performing to move packages from dev to production). As it sits, PkgBot provides a fully documented system. It allows our Site Admins visibility into the following: * What software is managed/packaged via AutoPkg (recipes) @@ -204,8 +91,8 @@ As it sits, PkgBot provides a fully documented system. It allows our Site Admin * Or was is denied? * And more -All this, without having to ask anyone (aka: _me_). And its all visible within a Slack channel as well as a web view, which has a sortable and filterable table. And, if any Site Admin is feeling ambitious, via the PkgBot API. +All this without having to ask anyone (aka: _me_). 
All the information is visible within a Slack channel as well as the web front end, which has sortable and filterable tables; and if any Site Admin is feeling ambitious, it's also visible via the PkgBot API. -Plus a quick, or "emergency," push of a new software version to production at the _**press of a button**_ within Slack from my phone, from any where. +Plus a quick, or "emergency," promotion of a new software version to production at the _**press of a button**_ within Slack from my phone, from any where. -No, this is not a CI/CD workflow that many organizations seem to be moving their AutoPkg workflows to, but I have a different set of goals that I'm attempting to accomplish. +No, this is not a CI/CD workflow that many organizations are moving their AutoPkg workflows to, but I have a different set of goals that I'm attempting to accomplish. diff --git a/api/__init__.py b/Settings/.gitkeep similarity index 100% rename from api/__init__.py rename to Settings/.gitkeep diff --git a/api/autopkg.py b/api/autopkg.py deleted file mode 100644 index 7157ea9..0000000 --- a/api/autopkg.py +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/local/autopkg/python - -from datetime import datetime - -from fastapi import APIRouter, Body, Depends - -import config, utils -from db import models -from api import package, settings, user -from api.slack import send_msg -from execute import recipe_manager, recipe_runner - - -config.load() -log = utils.log -router = APIRouter( - prefix = "/autopkg", - tags = ["autopkg"], - dependencies = [Depends(user.verify_admin)], - responses = settings.custom_responses -) - - -@router.post("/workflow/dev", summary="Dev Workflow", - description="The Dev workflow will create a new package and post to chat.") -async def dev(pkg_object: models.Package_In = Body(..., pkg_object=Depends(models.Package_In))): - """Workflow to create a new package in the database and then post a message to chat. 
- - Args: - pkg_object (models.Package_In): Details about a package object - - Returns: - [JSON]: Result of the operation - """ - - created_pkg = await package.create(pkg_object) - results = await send_msg.new_pkg_msg(created_pkg) - pkg_db_object = await models.Packages.filter(id=created_pkg.id).first() - pkg_db_object.slack_ts = results.get('ts') - pkg_db_object.slack_channel = results.get('channel') - await pkg_db_object.save() - - # Update the "Last Ran" attribute for this recipe - recipe_object = await models.Recipes.filter(recipe_id=pkg_db_object.recipe_id).first() - recipe_object.last_ran = pkg_db_object.packaged_date - await recipe_object.save() - - return { "Result": "Success" } - - -@router.post("/workflow/prod", summary="Production Workflow", - description="Workflow to move a package into production and update the Slack message.") -async def prod(pkg_object: models.Package_In = Body(..., pkg_object=Depends(models.Package_In))): - - if pkg_object.promoted_date is None: - date_to_convert = datetime.now() - - else: - date_to_convert = pkg_object.promoted_date - - pkg_object.promoted_date = await utils.utc_to_local(date_to_convert) - - pkg_object.status = "prod" - - packages = await models.Package_Out.from_queryset( - models.Packages.filter(recipe_id=pkg_object.recipe_id, version=pkg_object.version)) - - updated_pkg_object = await package.update(packages[-1].id, pkg_object) - - # try: - results = await send_msg.promote_msg(updated_pkg_object) - return { "Result": "Success" } - - # except: - # return { "statuscode": 400, "Result": "Failed to post message" } - - -@router.post("/workflow/promote", summary="Promote package to production", -description="Promote a package to production by id.") -async def promote_package(background_tasks, id: int = Depends(package.get_package_by_id)): - - pkg_object = await package.get_package_by_id(id) - - background_tasks.add_task( - recipe_runner.main, - [ - "run", - "--action", "promote", - "--environment", "prod", - 
"--recipe-identifier", pkg_object.dict().get("recipe_id"), - "--pkg-name", "{}".format(pkg_object.dict().get("pkg_name")) - ] - ) - - return { "Result": "Queued background task..." } - - -@router.post("/workflow/deny", summary="Do not promote package to production", - description="Performs the necessary actions when a package is not approved to production use.") -async def deny_package(background_tasks, id: int = Depends(package.get_package_by_id)): - - pkg_object = await package.get_package_by_id(id) - - background_tasks.add_task( - recipe_manager.main, - [ - "single", - "--recipe-identifier", pkg_object.dict().get("recipe_id"), - "--disable", - "--force" - ] - ) - - await send_msg.deny_pkg_msg(pkg_object) diff --git a/api/package.py b/api/package.py deleted file mode 100644 index 6327d8b..0000000 --- a/api/package.py +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/local/autopkg/python - -from typing import List, Dict -from fastapi import APIRouter, Depends, HTTPException, status -from tortoise.contrib.fastapi import HTTPNotFoundError - -import utils -from db import models -from api import user, settings - - -log = utils.log - -router = APIRouter( - prefix = "/package", - tags = ["package"], - responses = settings.custom_responses -) - - -@router.get("/", summary="Get all packages", description="Get all packages in the database.", - dependencies=[Depends(user.get_current_user)]) -async def get_packages(): - - packages = await models.Package_Out.from_queryset(models.Packages.all()) - - return { "total": len(packages), "packages": packages } - - -@router.get("/id/{id}", summary="Get package by id", description="Get a package by its id.", - dependencies=[Depends(user.get_current_user)], response_model=models.Package_Out) -async def get_package_by_id(id: int): - - pkg_object = await models.Package_Out.from_queryset_single(models.Packages.get(id=id)) - - return pkg_object - - -@router.post("/", summary="Create a package", description="Create a package.", - 
dependencies=[Depends(user.verify_admin)], response_model=models.Package_Out) -async def create(pkg_object: models.Package_In = Depends(models.Package_In)): - - created_pkg = await models.Packages.create(**pkg_object.dict(exclude_unset=True, exclude_none=True)) - - return await models.Package_Out.from_tortoise_orm(created_pkg) - - -@router.put("/id/{id}", summary="Update package by id", description="Update a package by id.", - dependencies=[Depends(user.verify_admin)], response_model=models.Package_Out) -async def update(id: int, pkg_object: models.Package_In = Depends(models.Package_In)): - - if type(pkg_object) != dict: - pkg_object = pkg_object.dict(exclude_unset=True, exclude_none=True) - - await models.Packages.filter(id=id).update(**pkg_object) - - return await models.Package_Out.from_queryset_single(models.Packages.get(id=id)) - - -@router.delete("/id/{id}", summary="Delete package by id", description="Delete a package by id.", - dependencies=[Depends(user.verify_admin)]) -async def delete_package_by_id(id: int): - - delete_object = await models.Packages.filter(id=id).delete() - - if not delete_object: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Package does not exist.") - - else: - return { "result": "Successfully deleted package id: {}".format(id) } diff --git a/api/recipe.py b/api/recipe.py deleted file mode 100644 index 688f3ca..0000000 --- a/api/recipe.py +++ /dev/null @@ -1,270 +0,0 @@ -#!/usr/local/autopkg/python - -from functools import reduce -from typing import List, Dict - -from fastapi import APIRouter, BackgroundTasks, Body, Depends, HTTPException, status - -import config, utils -from db import models -from api import user, settings -from api.slack import bot, build_msg, send_msg -from execute import recipe_manager, recipe_runner - - -log = utils.log -config.load() -router = APIRouter( - prefix = "/recipe", - tags = ["recipe"], - responses = settings.custom_responses -) - - -@router.get("s/", summary="Get all recipes", 
description="Get all recipes in the database.", - dependencies=[Depends(user.get_current_user)]) -async def get_recipes(): - - recipes = await models.Recipe_Out.from_queryset(models.Recipes.all()) - - return { "total": len(recipes), "recipes": recipes } - - -@router.get("/id/{id}", summary="Get recipe by id", description="Get a recipe by its id.", - dependencies=[Depends(user.get_current_user)], response_model=models.Recipe_Out) -async def get_by_id(id: int): - - recipe_object = await models.Recipe_Out.from_queryset_single(models.Recipes.get(id=id)) - - return recipe_object - - -@router.get("/recipe_id/{recipe_id}", summary="Get recipe by recipe_id", description="Get a recipe by its recipe_id.", - dependencies=[Depends(user.get_current_user)], response_model=models.Recipe_Out) -async def get_by_recipe_id(recipe_id: str): - - recipe_object = await models.Recipe_Out.from_queryset_single(models.Recipes.get(recipe_id=recipe_id)) - - return recipe_object - - -@router.post("/", summary="Create a recipe", description="Create a recipe.", - dependencies=[Depends(user.verify_admin)], response_model=models.Recipe_Out) -async def create(recipe_object: models.Recipe_In = Body(..., recipe_object=Depends(models.Recipe_In))): - - created_recipe = await models.Recipes.create(**recipe_object.dict(exclude_unset=True, exclude_none=True)) - - return await models.Recipe_Out.from_tortoise_orm(created_recipe) - - -@router.put("/id/{id}", summary="Update recipe by id", description="Update a recipe by id.", - dependencies=[Depends(user.verify_admin)], response_model=models.Recipe_Out) -async def update_by_id(id: int, recipe_object: models.Recipe_In = Depends(models.Recipe_In)): - - if type(recipe_object) != dict: - recipe_object = recipe_object.dict(exclude_unset=True, exclude_none=True) - - await models.Recipes.filter(id=id).update(**recipe_object) - - return await models.Recipe_Out.from_queryset_single(models.Recipes.get(id=id)) - - -@router.put("/recipe_id/{recipe_id}", summary="Update 
recipe by recipe_id", description="Update a recipe by recipe_id.", - dependencies=[Depends(user.verify_admin)], response_model=models.Recipe_Out) -async def update_by_recipe_id(recipe_id: str, - recipe_object: models.Recipe_In = Body(..., recipe_object=Depends(models.Recipe_In))): - - if type(recipe_object) != dict: - recipe_object = recipe_object.dict(exclude_unset=True, exclude_none=True) - - await models.Recipes.filter(recipe_id=recipe_id).update(**recipe_object) - - return await models.Recipe_Out.from_queryset_single(models.Recipes.get(recipe_id=recipe_id)) - - -@router.delete("/id/{id}", summary="Delete recipe by id", description="Delete a recipe by id.", - dependencies=[Depends(user.verify_admin)]) -async def delete_by_id(id: int): - - delete_object = await models.Recipes.filter(id=id).delete() - - if not delete_object: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Recipe does not exist.") - - else: - return { "result": "Successfully deleted recipe id: {}".format(id) } - - -@router.delete("/recipe_id/{recipe_id}", summary="Delete recipe by recipe_id", - description="Delete a recipe by recipe_id.", dependencies=[Depends(user.verify_admin)]) -async def delete_by_recipe_id(recipe_id: str): - - delete_object = await models.Recipes.filter(recipe_id=recipe_id).delete() - - if not delete_object: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Recipe does not exist.") - - else: - return { "result": "Successfully deleted recipe id: {}".format(recipe_id) } - - -@router.post("/error", summary="Handle recipe errors", - description="This endpoint is called when a recipe errors out during an autopkg run.", - dependencies=[Depends(user.verify_admin)]) -async def recipe_error(recipe_id: str, error: str): - - # Create DB Entry in errors table - error_message = await models.ErrorMessages.create( recipe_id=recipe_id ) - - # Post Slack Message - try: - error_list = error.split(': ') - error_dict = reduce(lambda x, y: {y:x}, 
error_list[::-1]) - - except: - error_dict = { recipe_id: error } - - results = await send_msg.recipe_error_msg(recipe_id, error_message.id, error_dict) - - updates = { - "slack_ts": results.get('ts'), - "slack_channel": results.get('channel') - } - - await models.ErrorMessages.update_or_create(updates, id=error_message.id) - - # Mark the recipe disabled - recipe_object = await models.Recipes.filter(recipe_id=recipe_id).first() - recipe_object.enabled = False - await recipe_object.save() - - return { "Result": "Success" } - - -# @router.post("/trust", summary="Update recipe trust info", -@router.post("/trust/update", summary="Update recipe trust info", - description="Update a recipe's trust information. Runs `autopkg update-trust-info `.", - dependencies=[Depends(user.verify_admin)]) -# async def trust_recipe(id: int, background_tasks: BackgroundTasks, user_id: str, channel: str): -async def recipe_trust_update(id: int, background_tasks: BackgroundTasks, user_id: str, channel: str): - - # Get Error ID - error_object = await models.ErrorMessage_Out.from_queryset_single(models.ErrorMessages.get(id=id)) - - # Get recipe object - recipe_object = await models.Recipes.filter(recipe_id=error_object.recipe_id).first() - - # extra_args = { 'error_id': id } - - if recipe_object: - background_tasks.add_task( - recipe_runner.main, - [ - "--recipe-identifier", error_object.recipe_id, - "--action", "trust", - # str(extra_args) - "--error_id", id - ] - ) - - # Mark the recipe enabled - recipe_object.enabled = True - await recipe_object.save() - - return { "Result": "Queued background task..." 
} - - else: - - blocks = await build_msg.missing_recipe_msg(error_object.recipe_id, "update trust for") - - await bot.SlackBot.post_ephemeral_message( - user_id, blocks, - channel=channel, - text="Encountered error attempting to update trust for `{}`".format(error_object.recipe_id) - ) - - -# @router.post("/do-not-trust", summary="Do not approve trust changes", -@router.post("/trust/deny", summary="Do not approve trust changes", - description="This endpoint will update that database to show that the " - "changes to parent recipe(s) were not approved.", - dependencies=[Depends(user.verify_admin)]) -# async def disapprove_changes(id: int): -async def recipe_trust_deny(id: int): - - # Get Error ID - error_object = await models.ErrorMessage_Out.from_queryset_single(models.ErrorMessages.get(id=id)) - - await send_msg.deny_trust_msg(error_object) - - -# @router.post("/trust-update-success", summary="Trust info was updated successfully", -@router.post("/trust/update/success", summary="Trust info was updated successfully", - description="Performs the necessary actions after trust info was successfully updated.", - dependencies=[Depends(user.verify_admin)]) -# async def trust_update_success(recipe_id: str, msg: str): -async def recipe_trust_update_success(recipe_id: str, msg: str, error_id: int): - """ When update-trust-info succeeds """ - - # results = await models.ErrorMessage_Out.from_queryset(models.ErrorMessages.filter(recipe_id=recipe_id)) - # Get DB Entry - error_object = await models.ErrorMessage_Out.from_queryset_single(models.ErrorMessages.get(id=error_id)) - - # Hacky work around if there are multiple "error messages" in the database for the recipe id - # while not results[-1].response_url: - # del results[-1] - - return await send_msg.update_trust_success_msg(error_object) - - -# @router.post("/trust-update-error", summary="Trust info failed to update", -@router.post("/trust/update/failed", summary="Trust info failed to update", - description="Performs the 
necessary actions after trust info failed to update.", - dependencies=[Depends(user.verify_admin)]) -# async def trust_update_error(recipe_id: str, msg: str): #, -async def recipe_trust_update_failed(recipe_id: str, msg: str): - """ When update-trust-info fails """ - - # Get DB Entry - error_object = await models.ErrorMessage_Out.from_queryset_single(models.ErrorMessages.get(recipe_id=recipe_id)) - - results = await send_msg.update_trust_error_msg(msg, error_object) - - updates = { - "slack_ts": results.get('ts'), - "slack_channel": results.get('channel') - } - - await models.ErrorMessages.update_or_create(updates, id=error_object.id) - - # Mark the recipe disabled - recipe_object = await models.Recipes.filter(recipe_id=error_object.recipe_id).first() - recipe_object.enabled = False - await recipe_object.save() - - return { "Result": "Success" } - - -# @router.post("/trust-verify-error", summary="Parent trust info has changed", -@router.post("/trust/verify/failed", summary="Parent trust info has changed", - description="Performs the necessary actions after parent recipe trust info has changed.", - dependencies=[Depends(user.verify_admin)]) -# async def trust_error(payload: dict = Body(...)): -async def reciepe_trust_verify_failed(payload: dict = Body(...)): - """ When `autopkg verify-trust-info ` fails """ - - recipe_id = payload.get("recipe_id") - error_msg = payload.get("msg") - - # Create DB Entry in errors table - error_object = await models.ErrorMessages.create(recipe_id=recipe_id) - - # Post Slack Message - results = await send_msg.trust_diff_msg(error_msg, error_object) - - # Mark the recipe disabled - recipe_object = await models.Recipes.filter(recipe_id=error_object.recipe_id).first() - recipe_object.enabled = False - await recipe_object.save() - - return { "Result": "Success" } diff --git a/api/slack/bot.py b/api/slack/bot.py deleted file mode 100644 index d647cf5..0000000 --- a/api/slack/bot.py +++ /dev/null @@ -1,408 +0,0 @@ -#!/usr/local/autopkg/python 
- -import hmac -import json -import ssl -import time -import certifi - -from fastapi import APIRouter, BackgroundTasks, Request - -from slack_sdk.errors import SlackApiError -from slack_sdk.web.async_client import AsyncWebClient -from slack_sdk.webhook.async_client import AsyncWebhookClient - -import config, utils -from db import models -from api import autopkg, package, recipe, settings, user -from api.slack import build_msg - - -config.load() -log = utils.log - -SlackBot = None -ssl_context = ssl.create_default_context(cafile=certifi.where()) -router = APIRouter( - prefix = "/slackbot", - tags = ["slackbot"], - responses = settings.custom_responses -) - - -class SlackClient(object): - - def __init__(self, **kwargs): - self.token = kwargs["token"] - self.bot_name = kwargs["bot_name"] - self.channel = kwargs["channel"] - self.slack_id = kwargs["slack_id"] - - self.client = AsyncWebClient(token=self.token, ssl=ssl_context) - - - async def post_message(self, blocks, text="Pkg status incoming..."): - - try: - response = await self.client.chat_postMessage( - channel = self.channel, - text = text, - blocks = await utils.replace_sensitive_strings(blocks), - username = self.bot_name, - icon_emoji = ":x:" - ) - - except SlackApiError as error: - log.error("Slack encountered an error: {}".format(error.response["error"])) - raise error from error - - return response - - - async def update_message(self, blocks, ts, text="Updated message..."): - - try: - response = await self.client.chat_update( - channel = self.channel, - text = text, - blocks = await utils.replace_sensitive_strings(blocks), - ts = str(ts) - # username = self.bot_name, - # icon_emoji = ":x:" - ) - - except SlackApiError as error: - log.error("Slack encountered an error: {}".format(error.response["error"])) - raise error from error - - return response - - - async def delete_message(self, ts): - - try: - await self.client.chat_delete( - channel = self.channel, - ts = str(ts) - ) - - return { "Result": 
"Successfully deleted message" } - - except SlackApiError as error: - log.error("Slack encountered an error: {}".format(error.response["error"])) - return error - - - async def update_message_with_response_url(self, response_url, blocks, text="Pkg status update..."): - - try: - webhook = AsyncWebhookClient(url=response_url, ssl=ssl_context) - response = await webhook.send( - text = text, - blocks = await utils.replace_sensitive_strings(blocks), - replace_original = True - ) - - if response.status_code != 200: - log.error("Failed to update message! Status code: {} | Error message: {}".format(response.status_code, response.body)) - - else: - log.debug("Successfully updated msg via response_url") - - return response - - except SlackApiError as error: - log.error("Slack encountered an error: {}".format(error)) - log.error("Slack encountered an error.dir: {}".format(dir(error))) - log.error("Slack encountered an error.response['error']: {}".format(error.response["error"])) - raise error from error - - - async def post_ephemeral_message(self, user, blocks, channel, text="Private Note"): - - try: - response = await self.client.chat_postEphemeral( - channel = self.channel, - user = user, - text = text, - blocks = await utils.replace_sensitive_strings(blocks), - username = self.bot_name, - icon_emoji = ":x:" - ) - - except SlackApiError as error: - log.error("Slack encountered an error: {}".format(error.response["error"])) - raise error from error - - return response - - - async def file_upload(self, content=None, file=None, filename=None, filetype=None, - title=None, text=None, thread_ts=None): - - try: - response = await self.client.files_upload( - channels = self.channel, - content = content, - file = file, - filename = filename, - filetype = filetype, - title = title, - initial_comment = await utils.replace_sensitive_strings(text), - thread_ts = thread_ts, - username = self.bot_name - ) - - except SlackApiError as error: - log.error("Slack encountered an error: 
{}".format(error.response["error"])) - raise error from error - - return response - - - async def invoke_reaction(self, **kwargs): - - kwargs.update( - { - "channel": kwargs.get("channel", self.channel), - "timestamp": str(kwargs.get("ts")) - } - ) - - if "ts" in kwargs: - del kwargs["ts"] - - try: - return await self.client.api_call( - "reactions.{}".format(kwargs.get("action")), - params = kwargs - ) - - except SlackApiError as error: - error_key = error.response["error"] - - if not ( - kwargs.get("action") == "add" and error_key == "already_reacted" or - kwargs.get("action") == "remove" and error_key == "no_reaction" - ): - log.error("Slack encountered an error: {}".format(error_key)) - raise error from error - - else: - log.debug("Unable to perform the specified reaction action") - - - async def reaction(self, action=None, emoji=None, ts=None, **kwargs): - - # log.debug("Args:\n\taction: {}\n\temoji: {}\n\tts: {}\n\tkwargs: {}".format( - # action, emoji, ts, kwargs)) - - # log.debug("Checking current reactions") - - # Force checking if this works or not..... - # It's not.... - # response = await self.client.api_call( - # "reactions.get", - # http_verb = "GET", - # params = { - # 'channel': 'C0266ANUEJZ', - # 'timestamp': '1646121180.754269' - # } - # ) - -##### This is currently not working.... - # # Check if reaction exists or not... 
- # response = await self.invoke_reaction(action="get", ts=ts, http_verb="GET") - # # log.debug("forced get response:\n{}".format(response)) - # reactions = response.get("message").get("reactions") - - # for reaction in reactions: - # if ( - # reaction.get("name") == kwargs.get("emoji") and - # elf.slack_id in reaction.get("users") - # ): - # log.debug("Reaction already exists") - # exists = True - # break - - # log.debug("Reaction doesn't exist") - # exists = False - - # if ( - # action == "add" and exists == False or - # action == "remove" and exists == True - # ): - - return await self.invoke_reaction(action=action, name=emoji, ts=ts, **kwargs) - - -async def verify_slack_request(request: Request): - - try: - slack_timestamp = request.headers.get("X-Slack-Request-Timestamp") - - if abs(time.time() - int(slack_timestamp)) > 60 * 5: - # The request timestamp is more than five minutes from local time. - # It could be a replay attack, so let's ignore it. - return False - - slack_body = (await request.body()).decode("UTF-8") - signature_basestring = ("v0:{}:{}".format(slack_timestamp, slack_body)).encode() - - computed_signature = "v0=" + await utils.compute_hex_digest( - bytes(config.pkgbot_config.get("Slack.signing_secret"), "UTF-8"), - signature_basestring) - - slack_signature = request.headers.get("X-Slack-Signature") - - if hmac.compare_digest(computed_signature, slack_signature): - log.debug("Valid Slack message") - return True - - else: - log.warning("Invalid Slack message!") - return False - - except: - - return False - - -@router.on_event("startup") -async def startup_constructor(): - - global SlackBot - - SlackBot = SlackClient( - token = config.pkgbot_config.get("Slack.bot_token"), - bot_name = config.pkgbot_config.get("Slack.bot_name"), - channel = config.pkgbot_config.get("Slack.channel"), - slack_id = config.pkgbot_config.get("Slack.slack_id") - ) - - -@router.post("/receive", summary="Handles incoming messages from Slack", - description="This endpoint 
receives incoming messages from Slack and calls the required " - "actions based on the message after verify the authenticity of the source.") -async def receive(request: Request, background_tasks: BackgroundTasks): - - valid_request = await verify_slack_request(request) - - if valid_request: - - form_data = await request.form() - payload = form_data.get("payload") - payload_object = json.loads(payload) - - user_id = payload_object.get("user").get("id") - username = payload_object.get("user").get("username") - channel = payload_object.get("channel").get("id") - message_ts = payload_object.get("message").get("ts") - response_url = payload_object.get("response_url") - - button_text = payload_object.get("actions")[0].get("text").get("text") - button_value_type, button_value = ( - payload_object.get("actions")[0].get("value")).split(":") - - log.debug("Incoming details:\n" - "user id: {}\nusername: {}\nchannel: {}\nmessage_ts: {}\nresponse_url: {}\nbutton_text: " - "{}\nbutton_value_type: {}\nbutton_value: {}\n".format( - user_id, username, channel, message_ts, response_url, button_text, button_value_type, button_value)) - - slack_user_object = models.PkgBotAdmin_In( - username = username, - slack_id = user_id - ) - - user_that_clicked = await user.get_user(slack_user_object) - - # try: - - # if user_that_clicked.full_admin: - # full_admin = True - - # except: - # full_admin = False - - # Verify click was from a PkgBotAdmin... 
- if user_that_clicked: - - # Perform action only if from a PkgBotAdmin - log.debug("PkgBotAdmin clicked button:") - - if button_text == "Approve": - log.debug(" -> APPROVE") - - if button_value_type == "Package": - log.debug(" --> Promoting Package") - await package.update(button_value, - { "response_url": response_url, "status_updated_by": username }) - - background_tasks.add_task( autopkg.promote_package, background_tasks, button_value ) -##### Testing this function -- can be removed - # await SlackBot.reaction( - # action = "remove", - # emoji = "gear", - # ts = message_ts - # ) - - elif button_value_type == "Trust": - log.debug(" --> Updating Trust Info") - - error_object = await models.ErrorMessages.filter(id=button_value).first() - - updates = { "response_url": response_url, "status_updated_by": username, "slack_ts": message_ts } - - await models.ErrorMessages.update_or_create(updates, id=error_object.id) - - background_tasks.add_task( recipe.trust_recipe, button_value, background_tasks, user_id=user_id, channel=channel ) - - elif button_text == "Deny": - log.debug(" -> DENY") - - if button_value_type == "Package": - log.debug(" --> Denying Package") - - await package.update( button_value, - { "response_url": response_url, - "status_updated_by": username, - "status": "Denied", - "notes": "This package was not approved for use in production." 
} - ) - - background_tasks.add_task( autopkg.deny_package, - background_tasks, button_value ) - - if button_value_type == "Trust": - log.debug(" --> Disapprove Trust Changes") - - error_object = await models.ErrorMessages.filter(id=button_value).first() - - updates = { - "response_url": response_url, - "status_updated_by": username, - "status": "Denied" - } - - await models.ErrorMessages.update_or_create(updates, id=error_object.id) - await recipe.disapprove_changes(button_value) - - await SlackBot.reaction( - action = "add", - emoji = "gear", - ts = message_ts - ) - - else: - - log.warning("Unauthorized user: `{}` [{}].".format(username, user_id)) - blocks = await build_msg.unauthorized_msg(username) - - await SlackBot.post_ephemeral_message(user_id, blocks, channel=channel, text="WARNING: Unauthorized access attempted") - - return { "results": 200 } - - else: - - log.warning("Invalid request") - return { "results": 500 } diff --git a/api/slack/build_msg.py b/api/slack/build_msg.py deleted file mode 100644 index f4cab41..0000000 --- a/api/slack/build_msg.py +++ /dev/null @@ -1,163 +0,0 @@ -#!/usr/local/autopkg/python - -import json - -from fastapi import APIRouter, Depends, Response - -import utils -from db import models -from api import settings, user -from api.slack import block_builders - - -log = utils.log -SlackBot = None -router = APIRouter( - prefix = "/slackbot/build", - tags = ["slackbot"], - dependencies = [Depends(user.verify_admin)], - responses = settings.custom_responses -) - - -@router.get("/new-pkg-msg", summary="Build new package message", - description="Builds a 'new package' message for Slack after " - "a .pkg has been added to the dev environment.") -async def new_pkg_msg(pkg_object: models.Package_In = Depends(models.Package_In)): - - blocks = [ - await block_builders.brick_header(pkg_object), - await block_builders.brick_main(pkg_object), - await block_builders.brick_footer_dev(pkg_object) - ] - - for brick in await 
block_builders.brick_button(pkg_object): - blocks.append(brick) - - return json.dumps(blocks, indent=4) - - -@router.get("/recipe-error", summary="Build error message", - description="Builds an 'error' message for Slack after a recipe has returned an error.") -async def recipe_error_msg(recipe_id: str, id: int, error: str): - - formatted_error = json.dumps(error, indent=4) - brick_error = await block_builders.brick_error(recipe_id, formatted_error) - - return json.dumps(brick_error, indent=4) - - -@router.get("/trust-diff-msg", summary="Build trust diff message", - description="Builds a message with the trust diff contents " - "for Slack after a recipe's parent trust info has changed.") -async def trust_diff_msg(id: int, recipe: str, error: str = None): - - blocks = [ - await block_builders.brick_trust_diff_header(), - await block_builders.brick_trust_diff_main(recipe) - ] - - if error: - blocks.append( await block_builders.brick_trust_diff_content(error) ) - - blocks.append( await block_builders.brick_trust_diff_button(id) ) - - return json.dumps(blocks, indent=4) - - -@router.get("/deny-pkg-msg", summary="Build deny package message", - description="Builds a 'package denied message' for Slack when " - "a .pkg is not approved for the production environment.") -async def deny_pkg_msg(pkg_object: models.Package_In = Depends(models.Package_In)): - - brick_footer = await block_builders.brick_footer_dev(pkg_object) - brick_footer.get("elements").append( - await block_builders.brick_footer_denied(pkg_object) - ) - - blocks = [ - await block_builders.brick_deny_pkg(pkg_object), - await block_builders.brick_main(pkg_object), - brick_footer - ] - - return json.dumps(blocks, indent=4) - - -@router.get("/deny-trust-msg", summary="Build deny trust message", - description="Builds an message for Slack stating a recipe's " - "parent trust info changes were not approved.") -async def deny_trust_msg( - error_object: models.ErrorMessage_In = Depends(models.ErrorMessage_In)): - - 
blocks = [ - await block_builders.brick_deny_trust(error_object), - await block_builders.brick_footer_denied_trust(error_object) - ] - - return json.dumps(blocks, indent=4) - - -@router.get("/promote-msg", summary="Build promoted package message", - description="Builds a 'package has been promoted' message for Slack " - "after a .pkg has been approved for the production environment.") -async def promote_msg(pkg_object: models.Package_In = Depends(models.Package_In)): - - brick_footer = await block_builders.brick_footer_dev(pkg_object) - brick_footer.get("elements").append( - await block_builders.brick_footer_promote(pkg_object) - ) - - blocks = [ - await block_builders.brick_main(pkg_object), - brick_footer - ] - - return json.dumps(blocks, indent=4) - - -@router.get("/update-trust-success-msg", summary="Build trust update success message", - description="Builds a 'success' message for Slack when a " - "recipe's trust info is updated successfully.") -async def update_trust_success_msg( - error_object: models.ErrorMessage_In = Depends(models.ErrorMessage_In)): - - blocks = [ - await block_builders.brick_update_trust_success_msg(error_object), - await block_builders.brick_footer_update_trust_success_msg(error_object) - ] - - return json.dumps(blocks, indent=4) - - -@router.get("/update-trust-error-msg", summary="Build trust update error message", - description="Builds an 'error' message for Slack when a recipe's trust info fails to update.") -async def update_trust_error_msg(msg: str, - error_object: models.ErrorMessage_In = Depends(models.ErrorMessage_In)): - - return json.dumps( - [ await block_builders.brick_update_trust_error_msg(error_object, msg) ], - indent=4 - ) - - -@router.get("/unauthorized-msg", summary="Build unauthorized message", - description="Builds a 'unauthorized' message for Slack when a user attempts to " - "perform a Slack interation with PkgBot that they're not authorized to perform.") -async def unauthorized_msg(user): - - return json.dumps( - 
await block_builders.unauthorized(user), - indent=4 - ) - - -@router.get("/missing-recipe-msg", summary="Build unauthorized message", - description="Builds a 'missing recipe' message for Slack when unable to locate " - "a recipe for a requested action.") -async def missing_recipe_msg(recipe_id, text): - - return json.dumps( - await block_builders.missing_recipe_msg(recipe_id, text), - indent=4 - ) diff --git a/api/slack/send_msg.py b/api/slack/send_msg.py deleted file mode 100644 index 3d7676a..0000000 --- a/api/slack/send_msg.py +++ /dev/null @@ -1,179 +0,0 @@ -#!/usr/local/autopkg/python - -from fastapi import APIRouter, Depends, Response - -import utils -from db import models -from api import package, settings, user -from api.slack import bot, build_msg - - -log = utils.log -SlackBot = None -router = APIRouter( - prefix = "/slackbot/send", - tags = ["slackbot"], - dependencies = [Depends(user.verify_admin)], - responses = settings.custom_responses -) - -max_content_size = 1500 - - -@router.post("/dev-msg", summary="Send new package message", - description="Sends a 'new package' message to Slack after " - "a .pkg has been added to the dev environment.") -async def new_pkg_msg(pkg_object: models.Package_In = Depends(models.Package_In)): - - return await bot.SlackBot.post_message( - await build_msg.new_pkg_msg(pkg_object), - text="Update for {}".format(pkg_object.name) - ) - - -@router.post("/promote-msg", summary="Send promoted package message", - description="Sends a 'package has been promoted' message to Slack " - "after a .pkg has been approved for the production environment.") -async def promote_msg(pkg_object: models.Package_In = Depends(models.Package_In)): - - await bot.SlackBot.update_message_with_response_url( - pkg_object.dict().get("response_url"), - await build_msg.promote_msg(pkg_object), - text="{} was promoted to production".format(pkg_object.pkg_name) - ) - - return await bot.SlackBot.reaction( - action = "remove", - emoji = "gear", - ts = 
pkg_object.slack_ts - ) - - -@router.post("/recipe-error-msg", summary="Send error message", - description="Sends an 'error' message to Slack after a recipe has returned an error.") -async def recipe_error_msg(recipe_id: str, id: int, error: str): - - blocks = await build_msg.recipe_error_msg(recipe_id, id, error) - - return await bot.SlackBot.post_message(blocks, text="Encountered recipe error") - - -@router.post("/trust-diff-msg", summary="Send trust diff message", - description="Sends a message with the trust diff contents to " - "Slack after a recipe's parent trust info has changed.") -async def trust_diff_msg( - error_msg: str, error_object: models.ErrorMessage_In = Depends(models.ErrorMessage_In)): - - if len(error_msg) > max_content_size: - - blocks = await build_msg.trust_diff_msg(error_object.id, error_object.recipe_id) - - else: - - blocks = await build_msg.trust_diff_msg(error_object.id, error_object.recipe_id, error_msg) - - response = await bot.SlackBot.post_message( - blocks, - text="Trust verification failed for `{}`".format(error_object.recipe_id) - ) - - error_object.slack_ts = response.get('ts') - error_object.save() - - if len(error_msg) > max_content_size: - - response = await bot.SlackBot.file_upload( - content = error_msg, - filename = "{}.diff".format(error_object.recipe_id), - filetype = "diff", - title = error_object.recipe_id, - text = "Diff Output for {}".format(error_object.recipe_id), - thread_ts = error_object.slack_ts - ) - - return response - - -@router.put("/update-trust-success-msg", summary="Send trust update success message", - description="Sends a 'success' message to Slack when " - "a recipe's trust info is updated successfully.") -async def update_trust_success_msg( - error_object: models.ErrorMessage_In = Depends(models.ErrorMessage_In)): - - blocks = await build_msg.update_trust_success_msg(error_object) - - response = await bot.SlackBot.update_message_with_response_url( - error_object.dict().get("response_url"), blocks, - 
text="Successfully updated trust info for {}".format(error_object.recipe_id)) - - if response.status_code == 200: - await bot.SlackBot.reaction( - action = "remove", - emoji = "gear", - ts = error_object.slack_ts - ) - - return response - - -@router.put("/update-trust-error-msg", summary="Send trust update error message", - description="Sends an 'error' message to Slack when a recipe's trust info fails to update.") -async def update_trust_error_msg(msg: str, - error_object: models.ErrorMessage_In = Depends(models.ErrorMessage_In)): - - blocks = await build_msg.update_trust_error_msg(error_object, msg) - - return await bot.SlackBot.update_message_with_response_url( - error_object.dict().get("response_url"), blocks, - text="Failed to update trust info for {}".format(error_object.recipe_id)) - - -@router.put("/deny-pkg-msg", summary="Send deny package message", - description="Sends a 'package denied message' to Slack when " - "a .pkg is not approved for the production environment.") -async def deny_pkg_msg(pkg_object: models.Package_In = Depends(models.Package_In)): - - blocks = await build_msg.deny_pkg_msg(pkg_object) - - response = await bot.SlackBot.update_message_with_response_url( - pkg_object.dict().get("response_url"), blocks, - text="{} was not approved for production".format(pkg_object.pkg_name)) - - if response.status_code == 200: - await bot.SlackBot.reaction( - action = "remove", - emoji = "gear", - ts = pkg_object.slack_ts - ) - - return response - - -@router.put("/deny-trust-msg", summary="Send deny trust message", - description="Send an message to Slack stating a recipe's " - "parent trust info changes were not approved.") -async def deny_trust_msg( - error_object: models.ErrorMessage_In = Depends(models.ErrorMessage_In)): - - blocks = await build_msg.deny_trust_msg(error_object) - - response = await bot.SlackBot.update_message_with_response_url( - error_object.dict().get("response_url"), blocks, - text="Trust info for {} was not 
approved".format(error_object.recipe_id)) - - if response.status_code == 200: - await bot.SlackBot.reaction( - action = "remove", - emoji = "gear", - ts = error_object.slack_ts - ) - - return response - - -@router.delete("/ts/{ts}", summary="Delete message by TS", - description="Delete a Slack message by its TS.", - dependencies=[Depends(user.verify_admin)]) -async def delete_by_ts(ts: int): - - return await bot.SlackBot.delete_message(ts) diff --git a/api/views.py b/api/views.py deleted file mode 100644 index 2fdddcd..0000000 --- a/api/views.py +++ /dev/null @@ -1,126 +0,0 @@ -#!/usr/local/autopkg/python - -import asyncio -import functools -import time - -from datetime import datetime -from typing import Callable - -from fastapi import APIRouter, Depends, Request -from fastapi.responses import HTMLResponse -from fastapi.templating import Jinja2Templates - -import config, utils - -from api import auth -from api import package as package_api -from api import recipe as recipe_api - - -log = utils.log -config.load() - - -def template_filter_datetime(date, date_format=None): - - if date: - - if not date_format: - date_format = "%Y-%m-%d %I:%M:%S" - - converted = datetime.fromisoformat(str(date)) - - return converted.strftime(date_format) - - -session = { "logged_in": False } -templates = Jinja2Templates(directory=config.pkgbot_config.get("PkgBot.jinja_templates")) -templates.env.filters["strftime"] = template_filter_datetime - -router = APIRouter( - tags = ["view"], - include_in_schema = False -) - - -@router.get("/", response_class=HTMLResponse) -async def index(request: Request): - - if request.state.user: - session["logged_in"] = True - - else: - session["logged_in"] = False - - return templates.TemplateResponse("index.html", { "request": request, "session": session }) - - -# @router.get("/login", response_class=HTMLResponse) -# async def userlogin(request: Request): - -# return templates.TemplateResponse("login.html", { "request": request, "session": session }) - - 
-@router.get("/packages", response_class=HTMLResponse) -async def package_history(request: Request, user = Depends(auth.login_manager)): - - session["logged_in"] = True - - pkgs = await package_api.get_packages() - - table_headers = [ - "", "", "Name", "Version", "Status", "Updated By", - "Packaged", "Promoted", "COMMON", "Flags", "Notes" - ] - - return templates.TemplateResponse("packages.html", - { "request": request, "session": session, - "table_headers": table_headers, "packages": pkgs.get("packages") }) - - -@router.get("/package/{id}", response_class=HTMLResponse) -async def package(request: Request, user = Depends(auth.login_manager)): - - session["logged_in"] = True - - pkg = await package_api.get_package_by_id(request.path_params['id']) - - return templates.TemplateResponse("package.html", - { "request": request, "session": session, "package": pkg }) - - -@router.get("/edit/{id}", response_class=HTMLResponse) -async def edit(request: Request, user = Depends(auth.login_manager)): - - pkg = await package_api.get_package_by_id(request.path_params['id']) - - return templates.TemplateResponse("edit.html", - { "request": request, "session": session, "package": pkg }) - - -@router.get("/recipes", response_class=HTMLResponse) -async def recipe_list(request: Request, user = Depends(auth.login_manager)): - - session["logged_in"] = True - - pkgs = await recipe_api.get_recipes() - - table_headers = [ - "ID", "Recipe ID", "Name", "Enable", "Pkg Only", "Last Ran", "Schedule", "Notes" - ] - - return templates.TemplateResponse("recipes.html", - { "request": request, "session": session, - "table_headers": table_headers, "recipes": pkgs.get("recipes") }) - - -@router.get("/recipe/{id}", response_class=HTMLResponse) -async def recipe_page(request: Request, user = Depends(auth.login_manager)): - - session["logged_in"] = True - - pkg = await recipe_api.get_by_id(request.path_params['id']) - - return templates.TemplateResponse("recipe.html", - { "request": request, "session": 
session, "recipe": pkg }) diff --git a/config.py b/config.py deleted file mode 100644 index b80da85..0000000 --- a/config.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/local/autopkg/python - -import os -import sys -import yaml - - -class PkgBot_Configuration(): - def __init__(self): - self.config = {} - - def add(self, key, value): - - self.config[key] = value - - def get(self, key): - - return self.config.get(key, None) - - -def load(args=None, **kwargs): - - # print(f"args: {args}") - # print(f"kwargs: {kwargs}") - passed_config_file = kwargs.get('pkgbot_config', None) - # env_config_file = os.environ.get('PKGBOT_CONFIG') - env_config_file = "./settings/local_pkgbot_config.yaml" - - # print(f"passed_config_file: {passed_config_file}") - - if passed_config_file != None and os.path.exists(passed_config_file): - config_file = passed_config_file - - elif env_config_file != None and os.path.exists(env_config_file): - config_file = env_config_file - - else: - print("\nError: Unable to load configuration.\n") - sys.exit(1) - - # Read in the configuration file - with open(config_file, "rb") as yaml_file: - configuration = yaml.safe_load(yaml_file) - - ################################################## - # Define variables - - PkgBotConfig = PkgBot_Configuration() - - for section in configuration: - for key in configuration.get(section): - PkgBotConfig.add("{}.{}".format(section, key), configuration[section].get(key)) - - if configuration.get("AutoPkg").get("binary") is None: - PkgBotConfig.add("AutoPkg.binary", "/usr/local/bin/autopkg") - - if configuration.get("Git").get("binary") is None: - PkgBotConfig.add("Git.binary", "/usr/bin/git") - - globals()["pkgbot_config"] = PkgBotConfig.config - - -if __name__ == "__main__": - print("Initializing PkgBot Configuration...") - load() diff --git a/db/db.sqlite3 b/db/db.sqlite3 deleted file mode 100644 index bf8606b..0000000 Binary files a/db/db.sqlite3 and /dev/null differ diff --git a/extras/JSSRecipeReceiptChecker.py 
b/examples/PkgBotPromoter.py similarity index 60% rename from extras/JSSRecipeReceiptChecker.py rename to examples/PkgBotPromoter.py index cf04f44..5ee7677 100644 --- a/extras/JSSRecipeReceiptChecker.py +++ b/examples/PkgBotPromoter.py @@ -1,8 +1,8 @@ -#!/usr/bin/python +#!/usr/local/autopkg/python # # Copyright 2022 Zack Thompson (MLBZ521) -# -# Based on Graham R Pugh's `JSSRecipeReceiptChecker.py` +# +# Inspired by Graham R Pugh's `JSSRecipeReceiptChecker.py` # https://github.com/autopkg/grahampugh-recipes/blob/main/CommonProcessors/JSSRecipeReceiptChecker.py # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,9 +16,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""See docstring for JSSRecipeReceiptChecker class""" - -from __future__ import absolute_import +"""See docstring for PkgBotPromoter class""" import os import plistlib @@ -30,14 +28,15 @@ from autopkglib import Processor, ProcessorError -__all__ = ["JSSRecipeReceiptChecker"] +__all__ = ["PkgBotPromoter"] -class JSSRecipeReceiptChecker(Processor): +class PkgBotPromoter(Processor): """An AutoPkg processor which works out the latest receipt from a different AutoPkg recipe, and provides useful values from its contents, which can be used to run a different recipe based on those values.""" + description = __doc__ input_variables = { "name": { "description": ( @@ -77,36 +76,18 @@ class JSSRecipeReceiptChecker(Processor): "required": False, } } - output_variables = { "version": { "description": "The current package version." }, - "CATEGORY": { - "description": "The package category." - }, - "SELF_SERVICE_DESCRIPTION": { - "description": "The self service description." - }, "pkg_path": { "description": "The package path." }, - "SELF_SERVICE_ICON": { - "description": "The self service icon." - }, - "package_notes": { - "description": "The package notes." 
- }, - "prod_name": { - "description": "The prod name of the Policy." - }, "PARENT_RECIPES": { "description": "The parent recipes, used to locate the self service icon." } } - description = __doc__ - def get_recipe_receipts(self, cache_dir, name): """Get the receipts for the passed recipe name. @@ -129,19 +110,17 @@ def get_recipe_receipts(self, cache_dir, name): files.sort(key=lambda x: getmtime(x), reverse=True) return files - except IOError as e: - raise ProcessorError("No receipts found!") from e + except IOError as error: + raise ProcessorError("No receipts found!") from error def main(self): - """Find the latest receipt that contains all - the information we're looking for. + """Find the latest receipt that contains all the information we're looking for. Raises: ProcessorError: Proper input variables must be supplied. ProcessorError: Package does not exist at the path found. - ProcessorError: Unable to locate a receipt - with the desired information. + ProcessorError: Unable to locate a receipt with the desired information. 
""" name = self.env.get("name") @@ -152,8 +131,10 @@ def main(self): version_found = False found_parent_recipes = False + ignore_keys = [ "API_PASSWORD", "API_USERNAME", "JSS_REPOS", "JSS_URL", "JSS_VERIFY_SSL" ] + if name: - name = f"local.jss.{name}" + name = f"local.jamf.{name}" elif recipe_id: name = recipe_id @@ -176,75 +157,79 @@ def main(self): self.output(" -> Skipping as this receipt had an error...") continue - list_parent_recipes = [] for step in plist: if step.get("Recipe input"): - recipe_input = step.get("Recipe input") - parent_recipes = recipe_input.get("PARENT_RECIPES") + self.output("Checking the Recipe input section...", verbose_level=3) - if parent_recipes: + if parent_recipes := recipe_input.get("PARENT_RECIPES"): found_parent_recipes = True - list_parent_recipes.extend(parent_recipes) - list_parent_recipes.extend( - (recipe_input.get("RECIPE_DIR"), recipe_input.get("RECIPE_PATH")) - ) - # self.env["PARENT_RECIPES"].extend(parent_recipes) - # self.env["PARENT_RECIPES"].append(recipe_input.get("RECIPE_DIR")) - # self.env["PARENT_RECIPES"].append(recipe_input.get("RECIPE_PATH")) - self.output(f'Parent Recipes: {self.env["PARENT_RECIPES"]}', verbose_level=2) + parent_recipes.extend( + [recipe_input.get("RECIPE_DIR"), recipe_input.get("RECIPE_PATH")]) for key in custom_variables: self.env[key] = recipe_input.get(key) - self.output(f"{key}: {self.env[key]}", verbose_level=2) + self.output(f"{key}: {self.env[key]}", verbose_level=3) self.env["NAME"] = recipe_input.get("NAME") self.output(f'NAME: {self.env["NAME"]}', verbose_level=2) - continue + elif re.search("InputVariableTextSubstituter", step.get("Processor"), re.IGNORECASE): + self.output(f"Checking the {step.get('Processor')} section...", verbose_level=2) + processor_output = step.get("Output") + key = processor_output.get("return_variable") + value = processor_output.get("return_variable_value") + self.output(f"{key}: {value}", verbose_level=3) + self.env[key] = value - if 
step.get("Processor"): + elif re.search("JamfCategoryUploader", step.get("Processor"), re.IGNORECASE): + self.output(f"Checking the {step.get('Processor')} section...", verbose_level=2) + processor_input = step.get("Input") - if re.search("InputVariableTextSubstituter", step.get("Processor"), re.IGNORECASE): - processor_output = step.get("Output") - key = processor_output.get("return_variable") - value = processor_output.get("return_variable_value") - self.output(f"{key}: {value}", verbose_level=2) - self.env[key] = value - continue + for key, value in processor_input.items(): - if re.search("JSSImporter", step.get("Processor"), re.IGNORECASE): - jssimporter_input = step.get("Input") + # Do not pull these values incase they're different + if key not in ignore_keys: + self.env[key] = value + self.output(f"{key}: {self.env[key]}", verbose_level=3) - for key, value in jssimporter_input.items(): + elif re.search("JamfPackageUploader", step.get("Processor"), re.IGNORECASE): + self.output(f"Checking the {step.get('Processor')} section...", verbose_level=2) + processor_input = step.get("Input") - # We don't want to pull these values, incase they're different. 
- if key not in { - "API_PASSWORD", "API_USERNAME", "JSS_REPOS", "JSS_URL", "JSS_VERIFY_SSL" }: + for key, value in processor_input.items(): - # Set the proper CASE for these variables - var_name = ( key if key in { "package_notes", "package_priority", "pkg_path", "prod_name", "version" } - else key.upper() ) + # Do not pull these values incase they're different + if key not in ignore_keys: - self.env[var_name] = value - self.output(f"{var_name}: {self.env[var_name]}", verbose_level=2) + self.env[key] = value + self.output(f"{key}: {self.env[key]}", verbose_level=3) - if self.env["version"] and os.path.basename(self.env["pkg_path"]) == match_pkg: - version_found = True + if self.env["version"] and os.path.basename(self.env["pkg_path"]) == match_pkg: + version_found = True - continue + elif re.search("JamfPolicyUploader", step.get("Processor"), re.IGNORECASE): + self.output(f"Checking the {step.get('Processor')} section...", verbose_level=2) + processor_input = step.get("Input") - if found_parent_recipes and version_found: - break + for key, value in processor_input.items(): - else: - continue + # Do not pull these values incase they're different + if key not in ignore_keys: + + self.env[key] = value + self.output(f"{key}: {self.env[key]}", verbose_level=3) except Exception: self.output("Missing required information...") continue + if found_parent_recipes and version_found: + break + + self.env["PARENT_RECIPES"].extend(parent_recipes) + if not version_found: raise ProcessorError("Unable to locate a receipt with a matching version!") @@ -254,5 +239,5 @@ def main(self): if __name__ == "__main__": - PROCESSOR = JSSRecipeReceiptChecker() + PROCESSOR = PkgBotPromoter() PROCESSOR.execute_shell() diff --git a/examples/ProudctionTemplate.recipe.yaml b/examples/ProudctionTemplate.recipe.yaml new file mode 100644 index 0000000..5ced99f --- /dev/null +++ b/examples/ProudctionTemplate.recipe.yaml @@ -0,0 +1,41 @@ +Description: This is a recipe template that is used to pull 
details from a "dev" + run for a package and perform a "production" run for a particular software title. + + The matched .pkg will be uploaded into Jamf Pro and optionally, a Policy created or updated. +Identifier: com.github.mlbz521.ProductionTemplate +Input: + match_pkg: "%MATCH_PKG%" + recipe_id: "%RECIPE_ID%" + cache_dir: "%CACHE_DIR%" +Process: +- Processor: PkgBotPromoter + Arguments: + custom_variables: + - CATEGORY + - CUSTOM_TRIGGER + - EXCLUSION + - EXCLUSION2 + - EXCLUSION3 + - SCRIPT_NAME + - SCRIPT_PARAMETER_1 + - SCRIPT_PARAMETER_2 + - SCRIPT_PARAMETER_3 + - SCRIPT_PARAMETER_4 + - SCRIPT_PARAMETER_5 + - SCRIPT_PARAMETER_6 + - SCRIPT_PARAMETER_7 + - SCRIPT_PARAMETER_8 + - SCRIPT_PRIORITY + - SELF_SERVICE_DESCRIPTION + - SELF_SERVICE_DISPLAY_NAME + match_pkg: '%match_pkg%' + recipe_id: '%recipe_id%' +- Processor: com.github.grahampugh.jamf-upload.processors/JamfCategoryUploader +- Processor: com.github.grahampugh.jamf-upload.processors/JamfPackageUploader +- Processor: StopProcessingIf + Arguments: + predicate: PKG_ONLY == TRUE +- Processor: com.github.grahampugh.jamf-upload.processors/JamfPolicyUploader + Arguments: + replace_policy: "True" + Comment: Self Service install policy diff --git a/extras/examples/Approved packages.png b/examples/images/Approved packages.png similarity index 100% rename from extras/examples/Approved packages.png rename to examples/images/Approved packages.png diff --git a/examples/images/Encountered an Error.png b/examples/images/Encountered an Error.png new file mode 100644 index 0000000..a85b261 Binary files /dev/null and b/examples/images/Encountered an Error.png differ diff --git a/extras/examples/New Software Version Available.png b/examples/images/New Software Version Available.png similarity index 100% rename from extras/examples/New Software Version Available.png rename to examples/images/New Software Version Available.png diff --git a/extras/examples/Trust Verification Failure.png b/examples/images/Trust Verification 
Failure.png similarity index 100% rename from extras/examples/Trust Verification Failure.png rename to examples/images/Trust Verification Failure.png diff --git a/examples/launchdaemons/com.github.mlbz521.pkgbot.celery.plist b/examples/launchdaemons/com.github.mlbz521.pkgbot.celery.plist new file mode 100644 index 0000000..8562a2a --- /dev/null +++ b/examples/launchdaemons/com.github.mlbz521.pkgbot.celery.plist @@ -0,0 +1,28 @@ + + + + + Label + com.github.mlbz521.pkgbot.celery + ProgramArguments + + /usr/local/autopkg/python + -m + celery + -A + -tasks.task.celery + worker + --loglevel=info + -Q + autopkg + + KeepAlive + + WorkingDirectory + /Library/AutoPkg/PkgBot + StandardErrorPath + /Library/AutoPkg/PkgBot/log/PkgBotServer-Celery.log + StandardOutPath + /Library/AutoPkg/PkgBot/log/PkgBotServer-Celery.log + + diff --git a/extras/com.github.mlbz521.pkgbot.service.plist b/examples/launchdaemons/com.github.mlbz521.pkgbot.plist similarity index 54% rename from extras/com.github.mlbz521.pkgbot.service.plist rename to examples/launchdaemons/com.github.mlbz521.pkgbot.plist index 3fe7873..518d286 100644 --- a/extras/com.github.mlbz521.pkgbot.service.plist +++ b/examples/launchdaemons/com.github.mlbz521.pkgbot.plist @@ -3,19 +3,18 @@ Label - com.github.mlbz521.pkgbot.service + com.github.mlbz521.pkgbot ProgramArguments - /usr/local/autopkg/python - /Library/AutoPkg/PkgBotServer/pkgbot.py + /Library/AutoPkg/PkgBot/pkgbot.py KeepAlive WorkingDirectory - Library/AutoPkg/PkgBotServer + /Library/AutoPkg/PkgBot StandardErrorPath - /var/log/PkgBotServer-Service.log + /Library/AutoPkg/PkgBot/log/PkgBotServer-Service.log StandardOutPath - /var/log/PkgBotServer-Service.log + /Library/AutoPkg/PkgBot/log/PkgBotServer-Service.log diff --git a/settings/dev_autopkg_prefs.plist b/examples/settings/dev_autopkg_prefs.plist similarity index 100% rename from settings/dev_autopkg_prefs.plist rename to examples/settings/dev_autopkg_prefs.plist diff --git a/settings/list_of_public_repos.txt 
b/examples/settings/list_of_public_repos.txt similarity index 100% rename from settings/list_of_public_repos.txt rename to examples/settings/list_of_public_repos.txt diff --git a/examples/settings/nginx.conf b/examples/settings/nginx.conf new file mode 100644 index 0000000..f36bccd --- /dev/null +++ b/examples/settings/nginx.conf @@ -0,0 +1,72 @@ + +worker_processes 1; + +error_log /Library/AutoPkg/PkgBot/logs/nginx.Error.log warn; +#error_log logs/error.log notice; +#error_log logs/error.log info; + +#pid logs/nginx.pid; + + +events { + worker_connections 256; +} + + +http { + include mime.types; + default_type application/octet-stream; + + log_format main '$remote_addr - $remote_user [$time_local] "$request" ' + '$status $body_bytes_sent "$http_host" "$upstream_response_time"' + '"$http_referer" "$http_user_agent" "$http_x_forwarded_for"'; + + access_log /Library/AutoPkg/PkgBot/logs/nginx.Access.log main; + + sendfile on; + + keepalive_timeout 65; + + server { + listen 443 ssl; + server_name pkgbot.uto.asu.edu; + + access_log /Library/AutoPkg/PkgBot/logs/nginx.Host.Access.log main; + + ssl_certificate /Library/AutoPkg/PkgBot/Settings/certificate_chain2022.pem; + ssl_certificate_key /Library/AutoPkg/PkgBot/Settings/private2022.key; + ssl_protocols TLSv1.2; + ssl_ciphers HIGH:!aNULL:!MD5; + ssl_prefer_server_ciphers on; + + + location / { + proxy_set_header Host $http_host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $connection_upgrade; + proxy_redirect off; + proxy_buffering off; + proxy_pass https://pkgbot.uto.asu.edu:8443; + + } + + location /static { + # path for static files + root /Library/AutoPkg/PkgBot/pkgbot; + } + + } + + map $http_upgrade $connection_upgrade { + default upgrade; + '' close; + } + + upstream uvicorn { + server unix:/tmp/uvicorn.sock; + } + + include servers/*; +} diff --git a/settings/pkgbot_config.yaml 
b/examples/settings/pkgbot_config.yaml similarity index 71% rename from settings/pkgbot_config.yaml rename to examples/settings/pkgbot_config.yaml index 2011ea4..7ef67b6 100644 --- a/settings/pkgbot_config.yaml +++ b/examples/settings/pkgbot_config.yaml @@ -15,8 +15,17 @@ AutoPkg: # public_repo_list: list_of_public_repos.txt +Celery: + # RabbitMQ credentials and URL + broker_url: amqp://guest:guest@localhost:5672// + + +Database: + location: "/Database/db.sqlite3" + + JamfPro_Dev: - autopkg_prefs: /Library/AutoPkg/PkgBot/settings/dev_autopkg_prefs.plist + autopkg_prefs: /Library/AutoPkg/PkgBot/Settings/dev_autopkg_prefs.plist jps_url: https://dev.server.org:8443 verify_ssl: True api_user: @@ -28,7 +37,7 @@ JamfPro_Dev: JamfPro_Prod: - autopkg_prefs: /Library/AutoPkg/PkgBot/settings/prod_autopkg_prefs.plist + autopkg_prefs: /Library/AutoPkg/PkgBot/Settings/prod_autopkg_prefs.plist jps_url: https://prod.server.org:8443 verify_ssl: True api_user: @@ -38,7 +47,8 @@ JamfPro_Prod: dp1_user: dp1_password: # Name of the recipe that will be used when promoting packages to public - recipe_template: com.github.mlbz521.jss.ProductionTemplate + recipe_template: com.github.mlbz521.ProductionTemplate + recipe_template_pkg_only: com.github.mlbz521.ProductionTemplate Git: @@ -47,6 +57,9 @@ Git: user_name: PkgBot user_email: private_repo: git@git.org:autopkg/recipes.git + local_repo_dir: ~/Library/AutoPkg/RecipeOverrides/ + repo_primary_branch: main + repo_push_branch: main ssh_config: | Host * AddKeysToAgent yes @@ -59,12 +72,11 @@ Git: -----END OPENSSH PRIVATE KEY----- -Services: +# Services: # The name of the LaunchDaemon that will ensure that the PkgBot service is running - pkgbot_service_LaunchDaemon_label: com.github.mlbz521.pkgbot - # The name of the LaunchAgent that will run autopkg on a schedule - autopkg_service_LaunchAgent_label: com.github.mlbz521.autopkg.service - autopkg_service_LaunchAgent_start_interval: 14400 + # pkgbot_service_LaunchDaemon_label: 
com.github.mlbz521.pkgbot + # The interval between running configured autopkg recipes + # autopkg_service_start_interval: 14400 Slack: @@ -82,13 +94,14 @@ PkgBot: # port: 443 host: localhost port: 8000 - # ssl_keyfile: /Library/AutoPkg/PkgBot/settings/private2022.key - # ssl_certfile: /Library/AutoPkg/PkgBot/settings/certificate_chain2022.pem + # ssl_keyfile: /Library/AutoPkg/PkgBot/Settings/private2022.key + # ssl_certfile: /Library/AutoPkg/PkgBot/Settings/certificate_chain2022.pem # Reload the server when changes are made keep_alive: True Admins: : : null # Yes, _null_ -- only need to set the username here + webhook_secret: some_long_string to validate incoming messages # Jinja Template Directory jinja_templates: templates # How long the web view token is valid for (in minutes) @@ -100,13 +113,13 @@ PkgBot: disable_existing_loggers: False formatters: default: - (): 'uvicorn.logging.DefaultFormatter' + (): 'uvicorn._logging.DefaultFormatter' fmt: '%(asctime)s | %(name)s | %(levelname)s | %(message)s' debug: - (): 'uvicorn.logging.DefaultFormatter' + (): 'uvicorn._logging.DefaultFormatter' fmt: '%(asctime)s | %(name)s | %(levelname)s | %(filename)s:%(lineno)s - %(funcName)20s() | %(message)s' access: - (): 'uvicorn.logging.AccessFormatter' + (): 'uvicorn._logging.AccessFormatter' fmt: '%(asctime)s | %(name)s | %(levelname)s | %(client_addr)s - "%(request_line)s" %(status_code)s' handlers: default: @@ -115,7 +128,7 @@ PkgBot: maxBytes: 10485760 # 10MB backupCount: 20 encoding: utf8 - filename: ./log/PkgBotServer.log + filename: ./logs/PkgBotServer.log level: INFO debugging: formatter: debug @@ -123,7 +136,7 @@ PkgBot: maxBytes: 10485760 # 10MB backupCount: 20 encoding: utf8 - filename: ./log/PkgBotServer-Debug.log + filename: ./logs/PkgBotServer-Debug.log level: DEBUG access: formatter: access @@ -131,7 +144,7 @@ PkgBot: maxBytes: 10485760 # 10MB backupCount: 20 encoding: utf8 - filename: ./log/PkgBotServer.HTTP.Access.log + filename: 
./logs/PkgBotServer.HTTP.Access.log loggers: uvicorn.debug: level: DEBUG @@ -154,7 +167,8 @@ PkgBot: Common: - # Strings that you do not want to be be printed "pubically" in notifications - # Any/all passwords/secrets/keys above are already included in the redacttion logic - # This value will be parsed as *REGEX* - # RedactionStrings: |\b\b + # Strings that you do not want to be be printed "publicly" in notifications + # Any/all passwords/secrets/keys above are already included in the redaction logic + # These values will be parsed as *Regex* + # redaction_strings: |\b\b + # additional_sensitive_key_names: _id|_hash diff --git a/settings/prod_autopkg_prefs.plist b/examples/settings/prod_autopkg_prefs.plist similarity index 100% rename from settings/prod_autopkg_prefs.plist rename to examples/settings/prod_autopkg_prefs.plist diff --git a/settings/recipe_config.yaml b/examples/settings/recipe_config.yaml similarity index 100% rename from settings/recipe_config.yaml rename to examples/settings/recipe_config.yaml diff --git a/execute/api_helper.py b/execute/api_helper.py deleted file mode 100644 index 6d1748f..0000000 --- a/execute/api_helper.py +++ /dev/null @@ -1,163 +0,0 @@ -#!/usr/local/autopkg/python - -import asyncio -import sys - -sys.path.insert(0, "/Library/AutoPkg/PkgBot") - -import httpx - -import config, utils - - -config.load() -log = utils.log - - -async def request(method, endpoint, data=None, json=None): - - pkgbot_server, headers = await _api_url_helper() - - async with httpx.AsyncClient() as client: - - if method == "get": - return await client.get('{}{}'.format(pkgbot_server, endpoint), - headers=headers - ) - - elif method == "post": - return await client.post('{}{}'.format(pkgbot_server, endpoint), - headers=headers, - data=data, - json=json - ) - - elif method == "delete": - return await client.delete('{}{}'.format(pkgbot_server, endpoint), - headers=headers - ) - - -async def _api_url_helper(): - - if 
config.pkgbot_config.get("PkgBot.enable_ssl"): - secure = "s" - else: - secure = "" - - pkgbot_server = "http{}://{}:{}".format( - secure, - config.pkgbot_config.get("PkgBot.host"), - config.pkgbot_config.get("PkgBot.port") - ) - - token = await authenticate_with_pkgbot( - pkgbot_server, - config.pkgbot_config.get("JamfPro_Prod.api_user"), - config.pkgbot_config.get("JamfPro_Prod.api_password") - ) - - headers = { - "Authorization": "Bearer {}".format(token), - "accept": "application/json", - "Content-Type": "application/json" - } - - return pkgbot_server, headers - - -async def authenticate_with_pkgbot(server: str, username: str, password: str): - - headers = { - "accept": "application/json", - "Content-Type": "application/x-www-form-urlencoded" - } - - data = { - "username": username, - "password": password - } - - async with httpx.AsyncClient() as client: - response_get_token = await client.post("{}/auth/token".format(server), - headers=headers, - data=data - ) - - if response_get_token.status_code == 200: - - response_json = response_get_token.json() - - return response_json["access_token"] - - -async def chat_failed_trust(recipe_id, msg): - """ Update Slack message that recipe_id failed verify-trust-info """ - - payload = { - "recipe_id": recipe_id, - "msg": msg - } - - await request( "post", "/recipe/trust/verify/failed", json=payload ) - - -async def chat_update_trust_msg(recipe_id, result, error_id): - """ Update slack message that recipe_id was trusted """ - - if result == "success": - endpoint = "trust/update/success" - else: - endpoint = "trust/update/failed" - - await request( - "post", "/recipe/{}?recipe_id={}&msg={}&error_id={}".format(endpoint, recipe_id, result, error_id) ) - - -async def chat_recipe_error(recipe_id, msg): - - await request( - "post", "/recipe/error?recipe_id={}&error={}".format(recipe_id, msg) ) - - -async def webhook_flare(recipe_id, action): - """ Send webhook when all other options fail. - - No implemented at this time. 
- """ - - pass - - -async def get_recipes(): - - return await request("get", "/recipes/") - - -async def get_recipe(id): - - return await request("get", "/recipe/id/{}".format(id)) - - -async def get_recipe_by_recipe_id(recipe_id): - return await request("get", "/recipe/recipe_id/{}".format(recipe_id)) - - -async def create_recipe(data): - - return await request("post", "/recipe/", data=data) - - -async def update_recipe_by_recipe_id(recipe_id, data): - - return await request("put", "/recipe/recipe_id/{}".format(recipe_id), data=data) - - -async def delete_recipe(id): - - return await request("delete", "/recipe/id/{}".format(id)) - - -async def delete_recipe_by_recipe_id(recipe_id): - - return await request("delete", "/recipe/recipe_id/{}".format(recipe_id)) diff --git a/execute/autopkg.py b/execute/autopkg.py deleted file mode 100644 index 6acce1c..0000000 --- a/execute/autopkg.py +++ /dev/null @@ -1,102 +0,0 @@ -#!/usr/bin/env python3 - -import argparse -import asyncio -import sys - -sys.path.insert(0, "/Library/AutoPkg/PkgBot") - -import utils -from execute import recipe_manager, recipe_runner - - -log = utils.log - - -async def main(run_args=sys.argv[1:]): - # log.debug("All calling args: {}".format(sys.argv)) - # log.debug("All calling args: {}".format(run_args)) - - ################################################## - # Parse Script Arguments - - parser = argparse.ArgumentParser(description="PkgBot Main.") - sub_parsers = parser.add_subparsers(dest="actions", - title="Available actions", help="Specify which action to perform.") - - # This group controls switches for the `recipe_runner` module - parser_run = sub_parsers.add_parser("run", help="Run Recipe(s)") - parser_run.set_defaults(call_function=recipe_runner.main) - parser_run.add_argument("--pkgbot-config", "-p", metavar="./config.yaml", type=str, - required=False, help="A config file with defined environmental configurations.") - parser_run.add_argument("--environment", "-e", type=str, default="dev", 
required=False, - help="Which environment to use.") - parser_run.add_argument("--action", choices=[ "promote", "trust" ], required=False, - help="Perform the requested action on the passed recipe id.") - run_type = parser_run.add_mutually_exclusive_group() - run_type.add_argument("--all", "-a", action="store_true", required=False, - help="Runs all the recipes in the specified recipe_config file.") - run_type.add_argument("--recipe-identifier", "-i", metavar="local.Firefox", required=False, - type=str, help="A recipe identifier.") - - # This group controls switches for the `recipe_manager` module - parser_manage = sub_parsers.add_parser("manage", - help="Manage recipe configuration file.") - parser_manage.set_defaults(call_function=recipe_manager.main) - - manage_sub_parsers = parser_manage.add_subparsers(dest="action", - title="Available actions", help="Specify which action to perform.") - - parser_import = manage_sub_parsers.add_parser("import", - help="Import a recipe configuration file") - parser_import.add_argument("--input", "-n", metavar="./path/to/recipe_config.yaml", type=str, - required=True, help="A file read in defined recipe configurations.") - - parser_generate = manage_sub_parsers.add_parser("generate", - help="Generate a recipe configuration file") - parser_generate.add_argument("--recipes-directory", "-rd", - metavar="./path/to/autopkg/recipes/", type=str, required=True, - help="The directory where your recipes are stored.") - - parser_single = manage_sub_parsers.add_parser("single", - help="Perform actions on individual recipe configurations") - parser_single.add_argument("--recipe-identifier", "-i", metavar="local.Firefox", type=str, - required=True, help="A recipe identifier.") - parser_single.add_argument("--schedule", "-s", type=int, required=False, - help="An integer which will be the number of days between running the recipe.") - parser_single.add_argument("--remove", "-rm", default=False, action="store_true", - required=False, help="Remove 
recipe from list.") - parser_single.add_argument("--force", "-f", default=False, action="store_true", required=False, - help="If recipe config already exists, force the changes without prompting.") - state = parser_single.add_mutually_exclusive_group() - state.add_argument("--enable", "-e", default=False, action="store_true", required=False, - help="Enable the recipe to be processed.") - state.add_argument("--disable", "-d", default=False, action="store_true", required=False, - help="Disable the recipe from being processed.") - jps_handler = parser_single.add_mutually_exclusive_group() - jps_handler.add_argument("--pkg-only", "-k", default=False, action="store_true", required=False, - help="Only upload the .pkg and do not create a Policy or any other modifications.") - jps_handler.add_argument("--policy", "-rk", default=False, action="store_true", required=False, - help="Create a Policy.") - - args, unknown = parser.parse_known_args(run_args) - # log.debug("Argparse args: {}".format(args)) - - if len(run_args) == 0: - parser.print_help() - sys.exit(0) - - else: - - # Load Configuration - # if args.pkgbot_config: - # config.load(pkgbot_config=args.pkgbot_config) - - # else: - # config.load() - - await args.call_function() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/execute/recipe_manager.py b/execute/recipe_manager.py deleted file mode 100644 index 15a2b2d..0000000 --- a/execute/recipe_manager.py +++ /dev/null @@ -1,260 +0,0 @@ -#!/usr/local/autopkg/python - -import argparse -import asyncio -import os -import re -import sys - -sys.path.insert(0, "/Library/AutoPkg/PkgBot") - -import utils -from execute import api_helper - - -log = utils.log - - -def create_recipe_config(recipe_object, **kwargs): - - # Run through parameters - if kwargs.get('disable'): - recipe_object["enabled"] = False - - elif kwargs.get('enable'): - recipe_object["enabled"] = True - - else: - recipe_object.setdefault("enabled", True) - - if kwargs.get('pkg_only'): - 
recipe_object["pkg_only"] = True - - elif kwargs.get('policy'): - recipe_object["pkg_only"] = False - - else: - recipe_object.setdefault("pkg_only", False) - - if kwargs.get('schedule'): - recipe_object["schedule"] = kwargs.get('schedule') - - else: - recipe_object.setdefault("schedule", 0) - - recipe_object["name"] = recipe_object["recipe_id"].rsplit(".", 1)[1] - - # recipe_object.setdefault("last_ran", 0) - - return recipe_object - - -async def main(run_args=sys.argv[1:]): - - if "manage" in run_args: - run_args.remove("manage") - - # log.debug('Recipe Manager:\n\tAll calling args: {}'.format(run_args)) - # log.debug('Recipe Manager:\n\tsys.argv: {}'.format(sys.argv)) - - ################################################## - # Setup Argparse - - parser = argparse.ArgumentParser(description="Manage recipe configuration file.") - sub_parsers = parser.add_subparsers(dest='action', - title="Available actions", help="Specify which action to perform.") - - parser_import = sub_parsers.add_parser('import', - help='Import a recipe configuration file') - parser_import.add_argument('--input', '-n', metavar='./path/to/recipe_config.yaml', type=str, - required=True, help='A file read in defined recipe configurations.') - - parser_generate = sub_parsers.add_parser('generate', - help='Generate a recipe configuration file') - parser_generate.add_argument('--recipes-directory', '-rd', - metavar='./path/to/autopkg/recipes/', type=str, required=True, - help='The directory where your recipes are stored.') - parser_generate.add_argument('--output', '-o', metavar='./path/to/recipe_config.yaml', type=str, - required=True, help='Where to save generated recipe configurations.') - - parser_manage = sub_parsers.add_parser('single', - help='Perform actions on individual recipe configurations') - parser_manage.add_argument('--recipe-identifier', '-i', metavar='local.Firefox', type=str, - required=True, help='A recipe identifier.') - parser_manage.add_argument('--schedule', '-s', type=int, 
required=False, - help='An integer which will be the number of days between running the recipe.') - parser_manage.add_argument('--remove', '-rm', default=False, action='store_true', - required=False, help='Remove recipe from list.') - parser_manage.add_argument('--force', '-f', default=False, action='store_true', required=False, - help='If recipe config already exists, force the changes without prompting.') - - state = parser_manage.add_mutually_exclusive_group() - state.add_argument('--enable', '-e', default=False, action='store_true', required=False, - help='Enable the recipe to be processed.') - state.add_argument('--disable', '-d', default=False, action='store_true', required=False, - help='Disable the recipe from being processed.') - - jps_handler = parser_manage.add_mutually_exclusive_group() - jps_handler.add_argument('--pkg-only', '-k', default=False, action='store_true', required=False, - help='Only upload the .pkg and do not create a Policy or any other modifications.') - jps_handler.add_argument('--policy', '-rk', default=False, action='store_true', required=False, - help='Remove the `pkg_only` flag.') - - ################################################## - # Parse Script Arguments - - args, unknown = parser.parse_known_args(run_args) - - if len(run_args) == 0: - parser.print_help() - sys.exit(0) - - if args.action == "generate": - - save_path = args.output - - log.info("Generating recipe config file: {}.".format(save_path)) - - if os.path.exists(save_path): - log.warning("WARNING: This recipe configuration file already exists") - override_answer = await utils.ask_yes_or_no( - "Do you want to overwrite your recipe configuration file?") - - if not override_answer: - log.info("No changes were made.") - sys.exit(0) - - if not os.path.exists(args.recipes_directory): - log.error("ERROR: Unable to locate recipe directory!") - sys.exit(1) - - recipes = [] - - # Walk the directory provided for recipes - for root, folders, files in 
os.walk(args.recipes_directory): - - # Loop through the files and perform the changes desired - for a_file in files: - - # Verify file has a .recipe extension - if re.search('.recipe', a_file) and re.search(r'^((?!Retired).)*$', os.path.join(root, a_file)): - - recipes.append(os.path.join(root, a_file)) - - log.info("Found {} recipes.".format(len(recipes))) - - recipe_configs = {} - - for recipe_item in recipes: - - # Read in the recipe - plist_contents = await utils.plist_reader(recipe_item) - - # Pull information that will be checked later - identifier = plist_contents.get('Identifier') - - recipe_config = create_recipe_config({"recipe_id": identifier}, **args.__dict__) - - recipe_configs[identifier] = recipe_config - - all_recipe_configurations = {} - all_recipe_configurations["recipes"] = dict(sorted(recipe_configs.items())) - - await utils.save_yaml(all_recipe_configurations, save_path) - - log.info("Recipe config file saved.") - - elif args.action == "import": - - log.info("Importing recipe config file from: {}.".format(args.input)) - - # Load the recipe config - recipe_configurations = await utils.load_yaml(args.input) - - # Get the recipes object - recipes = recipe_configurations.get("recipes") - - for recipe_item in recipes: - - recipe_config = recipes.get(recipe_item) - recipe_config["recipe_id"] = recipe_item - recipe_config["name"] = recipe_item.rsplit(".", 1)[1] - - await api_helper.create_recipe(recipe_config) - - log.info("All recipe configurations have been imported!") - - else: - - recipe_id = args.recipe_identifier - - recipe_object = ( await api_helper.get_recipe_by_recipe_id(recipe_id) ).json() - - if recipe_object.get("detail") == "Object does not exist": - # Recipe does not exist in the database. 
- recipe_object = None - - if args.remove: - - if not recipe_object: - log.info("Recipe does not exist in the database.") - - else: - - if not args.force: - - log.warning("WARNING: The recipe `{}` is about to be removed.".format(recipe_id)) - remove_answer = await utils.ask_yes_or_no( - "Do you want to remove this recipe's definition?") - - if args.force or remove_answer: - - await api_helper.delete_recipe_by_recipe_id(recipe_id) - - log.info("Recipe config removed: {}.".format(recipe_id)) - - else: - - log.info("No changes were made.") - - else: - - if not recipe_object: - - log.info("Creating the recipe config for: {}.".format(recipe_id)) - - # If the recipe does not exist, create an empty dict object for it - recipe_config = create_recipe_config({"recipe_id": recipe_id}, **args.__dict__) - await api_helper.create_recipe(recipe_config) - - else: - - # The recipe already exists - log.info("Updating the recipe config for: {}.".format(recipe_id)) - - recipe_object.pop('id') - - recipe_config = create_recipe_config(recipe_object, **args.__dict__) - - # Confirm changes - if not args.force: - - log.warning("WARNING: This recipe already exists") - override_answer = await utils.ask_yes_or_no( - "Do you want to override this recipe's definition?") - - # Write changes - if args.force or override_answer: - - await api_helper.update_recipe_by_recipe_id(recipe_object["recipe_id"], recipe_config) - - else: - - log.info("No changes were made.") - -#### SOME ERROR CHECKING SHOULD PROBABLY GO BEFORE HERE - log.info("Recipe config saved for: {}".format(recipe_id)) - - -if __name__ == "__main__": - asyncio.run( main(sys.argv) ) diff --git a/execute/recipe_runner.py b/execute/recipe_runner.py deleted file mode 100644 index 61888a3..0000000 --- a/execute/recipe_runner.py +++ /dev/null @@ -1,492 +0,0 @@ -#!/usr/local/autopkg/python - -import argparse -import ast -import asyncio -import getpass -import os -import re -import sys - -from datetime import datetime, timedelta - 
-sys.path.insert(0, "/Library/AutoPkg/PkgBot") - -import config, utils -from api import recipe -from execute import api_helper - - -log = utils.log - -async def check_recipe_schedule(interval, last_ran): - """Check if a recipe should be ran, based on the configured schedule. - - Args: - interval (int): The "schedule" in number of days to not for - last_ran (str): datetime object in str format when repice was last ran - - Returns: - boolean: - True: Recipe should be ran - False: Recipe should not be ran - """ - - if interval != 0 and last_ran != None: - - current_time = await utils.utc_to_local(datetime.now()) - last_ran_time = datetime.fromisoformat(last_ran) - interval_in_hours = interval * 24 - - return current_time - last_ran_time > timedelta(hours=interval_in_hours) - - return True - - -async def autopkg_verify_trust(recipe_id, console_user): - """Runs the passed recipe against `autopkg verify-trust-info`. - - Args: - recipe_id (str): Recipe identifier of a recipe. - - Returns: - dict: Dict describing the results of the ran process - """ - - log.info("Verifying trust info...") - - autopkg_prefs = os.path.abspath(config.pkgbot_config.get("JamfPro_Dev.autopkg_prefs")) - - command_autopkg_recipe_trust = "{binary} verify-trust-info {run_recipe} --prefs=\"{prefs}\" -vvv".format( - binary=config.pkgbot_config.get("AutoPkg.binary"), - run_recipe=recipe_id, - prefs=autopkg_prefs - ) - - if await get_user_context(): - command_autopkg_recipe_trust = "su - {console_user} -c \"{command}\"".format( - console_user=console_user, - command=command_autopkg_recipe_trust, - ) - - results_autopkg_recipe_trust = await utils.run_process_async(command_autopkg_recipe_trust) - - # Verify success - if not results_autopkg_recipe_trust['success']: - - if "Didn't find a recipe for" in results_autopkg_recipe_trust['stdout']: - log.error("Failed to locate recipe!") - await api_helper.chat_recipe_error(recipe_id, results_autopkg_recipe_trust['stdout'] ) - - else: - # Post Slack Message with 
recipe trust info - log.error("Failed to verify trust info!") - await api_helper.chat_failed_trust(recipe_id, results_autopkg_recipe_trust['stderr'] ) - - return False - - return True - - -async def autopkg_update_trust(recipe_id, console_user, error_id): - """Runs the passed recipe against `autopkg update-trust-info`. - - Args: - recipe_id (str): Recipe identifier of a recipe. - - Returns: - dict: Dict describing the results of the ran process - """ - - log.info("Updating trust info...") - - autopkg_prefs = os.path.abspath(config.pkgbot_config.get("JamfPro_Dev.autopkg_prefs")) - - command_autopkg_update_trust = "{binary} update-trust-info {run_recipe} --prefs=\"{prefs}\"".format( - binary=config.pkgbot_config.get("AutoPkg.binary"), - run_recipe=recipe_id, - prefs=autopkg_prefs - ) - - if await get_user_context(): - command_autopkg_update_trust = "su - {console_user} -c \"{command}\"".format( - console_user=console_user, - command=command_autopkg_update_trust, - ) - - results_autopkg_update_trust = await utils.run_process_async(command_autopkg_update_trust) - - if not results_autopkg_update_trust['success']: - log.error("Failed to update trust info") - log.error("results_autopkg_update_trust[stdout]: {}".format(results_autopkg_update_trust['stdout'])) - log.error("results_autopkg_update_trust[stderr]: {}".format(results_autopkg_update_trust['stderr'])) - # await api_helper.chat_update_trust_msg(recipe_id, result=results_autopkg_update_trust['stdout']) - - else: - log.info("Successfully updated trust for: {}".format(recipe_id)) - - recipe_file_path = results_autopkg_update_trust["stdout"].split("Wrote updated ")[-1] - - # Verify if there are changes that need to be committed - git_updated_filename_command = "{} -C \"/Users/{}/Library/AutoPkg/RecipeOverrides/\" diff --exit-code \"{}\"".format( - config.pkgbot_config.get("Git.binary"), - console_user, - recipe_file_path - ) - results_git_updated_filename_command = await 
utils.run_process_async(git_updated_filename_command) - - if results_git_updated_filename_command["status"] == 1: - - log.debug("Updated recipe filename: {}".format(recipe_file_path)) - - # Switch branches - git_switch_branch_command = "{binary} -C \"{path}\" switch trust-updates > /dev/null || ( {binary} -C \"{path}\" switch -c trust-updates > /dev/null && {binary} -C \"{path}\" push origin trust-updates )".format( - binary=config.pkgbot_config.get("Git.binary"), - path="/Users/{}/Library/AutoPkg/RecipeOverrides/".format(console_user) - ) - results_git_stage_file_command = await utils.run_process_async(git_switch_branch_command) - - if results_git_stage_file_command["success"]: - - # Stage updated recipe - git_stage_file_command = "{} -C \"/Users/{}/Library/AutoPkg/RecipeOverrides/\" add \"{}\"".format( - config.pkgbot_config.get("Git.binary"), - console_user, - recipe_file_path - ) - results_git_stage_file_command = await utils.run_process_async(git_stage_file_command) - - - if results_git_stage_file_command["success"]: - - log.debug("Staged recipe in git: {}".format(recipe_id)) - - # Commit changes - git_commit_file_command = "{} -C \"/Users/{}/Library/AutoPkg/RecipeOverrides/\" commit --message \"Updated Trust Info\" --message \"Recipe: {}\" --message \"By: PkgBot\"".format( - config.pkgbot_config.get("Git.binary"), - console_user, - recipe_id - ) - results_git_commit_file_command = await utils.run_process_async(git_commit_file_command) - - if results_git_commit_file_command["success"]: - - log.debug("Commit recipe locally: {}".format(recipe_id)) - - # Commented out due to changes in my insitution's remote git environment (use of protected branches, no deploy key, etc.) - # Or do this? 
- # git push --set-upstream origin trust-updates - - # git_push_commit_command = "cd \"/Users/{}/Library/AutoPkg/RecipeOverrides/\"; /usr/local/bin/gh pr create --fill --base \"trust-updates\"".format( - # console_user - # ) - - ##### Can't use git to push - #### Will need to submit a PR here to the non-main branch - # git_push_commit_command = "{} -C \"/Users/{}/Library/AutoPkg/RecipeOverrides/\" push".format( - # config.pkgbot_config.get("Git.binary"), - # console_user - # ) - - # if await get_user_context(): - # git_push_commit_command = "su - {console_user} -c \"{command}\"".format( - # console_user=console_user, - # command=git_push_commit_command, - # ) - - # results_git_push_commit_command = await utils.run_process_async(git_push_commit_command) - - # if results_git_push_commit_command["success"]: - - # log.debug("Successfully pushed commit for: {}".format(recipe_id)) - - # else: - - # log.error("Failed to push commit for: {}".format(recipe_id)) - else: - - log.error("Failed commit locally for: {}".format(recipe_id)) - - else: - - log.error("Failed to stage: {}".format(recipe_id)) - - else: - - log.error("Failed to switch branch: {}".format(recipe_id)) - - else: - - log.error("Failed to get file name for: {}".format(recipe_id)) - - - await api_helper.chat_update_trust_msg(recipe_id, result="success", error_id = error_id) - - -async def autopkg_runner(**kwargs): - """Runs the passed recipe against `autopkg run`. 
- - Args: - recipe (str): Recipe ID of a recipe - prefs (str): Path to a autopkg preference file - - Returns: - dict: Dict describing the results of the ran process - """ - - recipe_id = kwargs["recipe_id"] - pkg_only = kwargs['pkg_only'] - promote = kwargs['promote'] - console_user = kwargs['console_user'] - pkg_name = kwargs['pkg_name'] - extra_switches = "--key 'DISABLE_CODE_SIGNATURE_VERIFICATION=True'" - - log.info("Running...") - - if promote: - recipe_to_run = config.pkgbot_config.get("JamfPro_Prod.recipe_template") - autopkg_prefs = os.path.abspath(config.pkgbot_config.get("JamfPro_Prod.autopkg_prefs")) - extra_switches = "--ignore-parent-trust-verification-errors --key 'match_pkg={}'".format(pkg_name) - - if pkg_only: - extra_switches = "{} --key jss_pkg_only=True".format(extra_switches) - - else: - recipe_to_run = recipe_id - autopkg_prefs = os.path.abspath(config.pkgbot_config.get("JamfPro_Dev.autopkg_prefs")) - - # Build the command to run - command_autopkg_run = "{binary} run {run_recipe} --key 'recipe_id={id}' --prefs='{prefs}' --ignore-parent-trust-verification-errors --postprocessor PkgBot {extras} -vv".format( - binary=config.pkgbot_config.get("AutoPkg.binary"), - run_recipe=recipe_to_run, - id=recipe_id, - prefs=autopkg_prefs, - extras=extra_switches - ) - - if await get_user_context(): - command_autopkg_run = "su - {console_user} -c \"{command}\"".format( - console_user=console_user, - command=command_autopkg_run, - ) - - results_run = await utils.run_process_async(command_autopkg_run) - - return results_run - - -async def autopkg_process_wrapper(**kwargs): - - recipe_config = kwargs["recipe_config"] - recipe_id = recipe_config.get("recipe_id") - action = kwargs["action"] - console_user = kwargs["console_user"] - pkg_name = kwargs["pkg_name"] - error_id = kwargs.get("error_id", "") - - log.info("Recipe: {}".format(recipe_id)) - - if action == "trust": - await autopkg_update_trust(recipe_id, console_user, error_id) - - else: - - if action is None: 
- # Run checks if we're not promoting the recipe - - if not recipe_config["enabled"]: - log.info("Recipe is disabled; exiting...") - return - - if not await check_recipe_schedule( - recipe_config["schedule"], recipe_config["last_ran"] ): - # Do not run recipe due to scheduled interval - return - - if not await autopkg_verify_trust(recipe_id, console_user): - # Failed Trust Verification - return - - # Passed Trust Verification - results_autopkg_run = await autopkg_runner( - recipe_id=recipe_id, pkg_only=recipe_config["pkg_only"], promote=action, pkg_name=pkg_name, console_user=console_user) - - # Verify success - if results_autopkg_run['success']: - log.info("Successfully ran: {}".format(recipe_id)) - -##### Do not care about success here, as the PkgBot Post-Processor will handle the rest - # run_receipt = re.search(r'Receipt written to (.*)', results_autopkg_run['stdout']).group(1) - # plist_contents = await utils.plist_reader(run_receipt) - # log.debug("recipe_runner > plist_contents: \n{}\n*** End of plist_contents***".format(plist_contents)) - # for step in reversed(plist_contents): - # jssimporter_results = step - # break - - # changes = jssimporter_results.get('Output').get('jss_changed_objects') - # # Post Slack Message with results - # chatbot.post_dev_results(changes) -##### - if not results_autopkg_run['success']: - - # Post Slack Message with results - log.error("Failed running: {}".format(recipe_id)) - log.error("return code status: {}".format(results_autopkg_run['status'])) - log.error("stdout: {}".format(results_autopkg_run['stdout'])) - log.error("stderr: {}".format(results_autopkg_run['stderr'])) - - try: - run_receipt = re.search( - r'Receipt written to (.*)', results_autopkg_run['stdout']).group(1) - plist_contents = await utils.plist_reader(run_receipt) - - for step in reversed(plist_contents): - if step.get('RecipeError') != None: - run_error = step.get('RecipeError') - break - - except: - run_error = results_autopkg_run['stderr'] - - 
redacted_error = await utils.replace_sensitive_strings(run_error) - - await api_helper.chat_recipe_error(recipe_id, redacted_error) - - -async def get_user_context(): - - return os.getlogin() == "root" and os.getenv('USER') is None - - -async def main(run_args=sys.argv[1:]): - # log.debug('Recipe Runner:\n\tAll calling args: {}'.format(run_args)) - - ################################################## - # Setup Argparse - - parser = argparse.ArgumentParser(description="Run recipe overrides through AutoPkg.") - - run_type = parser.add_mutually_exclusive_group(required=True) - run_type.add_argument('--all', '-a', action='store_true', required=False, - help='Runs all the recipes in the database.') - run_type.add_argument('--recipe-identifier', '-i', metavar='local.Firefox', type=str, - required=False, help='A recipe identifier.') - - parser.add_argument('--pkgbot-config', '-p', metavar='./config.yaml', type=str, - required=False, help='A config file with defined environmental configurations.') - parser.add_argument('--environment', '-e', type=str, default="dev", required=False, - help='Which environment to use.') - parser.add_argument('--action', choices=[ "promote", "trust" ], required=False, - help='Perform the requested action on the passed recipe id.') - parser.add_argument('--pkg-name', '-n', metavar='Firefox-90.0.pkg', type=str, - required=False, help='The name of the package to match. 
This is to ensure the version \ - that is promoted matches what is intended.') - parser.add_argument('--error-id', '-id', type=str, required=False, - help='Error ID to map down stream functions too.') - - ################################################## - # Parse Script Arguments - - args, _ = parser.parse_known_args(run_args) - # log.debug('Recipe Runner:\n\tArgparse args: {}'.format(args)) - - if len(run_args) == 0: - parser.print_help() - sys.exit(0) - - elif args.action == "promote" and not args.pkg_name: - parser.print_help() - parser.error('The --promote argument requires the --pkg-name argument.') - - ################################################## - # Bits staged... - - if args.pkgbot_config: - config.load(pkgbot_config=args.pkgbot_config) - - else: - config.load() - - # Get the Console User - results_console_user = await utils.run_process_async("/usr/sbin/scutil", "show State:/Users/ConsoleUser") - console_user = re.sub("(Name : )|(\n)", "", ( re.search("Name : .*\n", results_console_user['stdout']).group(0) )) - - if args.environment == "dev" and not args.action: - - log.info("Checking for private repo updates...") - git_pull_command = "{binary} -C \"{path}\" switch main > /dev/null && {binary} -C \"{path}\" pull && $( {binary} -C \"{path}\" rev-parse --verify trust-updates > /dev/null 2>&1 && {binary} -C \"{path}\" switch trust-updates > /dev/null || {binary} -C \"{path}\" switch -c trust-updates > /dev/null )".format( - binary=config.pkgbot_config.get("Git.binary"), - path="/Users/{}/Library/AutoPkg/RecipeOverrides/".format(console_user) - ) - - if await get_user_context(): - git_pull_command = "su - {} -c \"{}\"".format( - console_user, git_pull_command, - ) - - results_git_pull_command = await utils.run_process_async(git_pull_command) - - if results_git_pull_command["success"]: - log.info(results_git_pull_command["stdout"]) - - else: - log.error("stdout\n:{}".format(results_git_pull_command["stdout"])) - 
log.error("stderr\n:{}".format(results_git_pull_command["stderr"])) - sys.exit(1) - - log.info("Updating parent recipe repos...") - autopkg_prefs = os.path.abspath(config.pkgbot_config.get("JamfPro_Dev.autopkg_prefs")) - - autopkg_repo_update_command = "{binary} repo-update all --prefs=\"{prefs}\"".format( - binary=config.pkgbot_config.get("AutoPkg.binary"), - prefs=autopkg_prefs - ) - - if await get_user_context(): - autopkg_repo_update_command = "su - {console_user} -c \"{autopkg_repo_update_command}\"".format( - console_user=console_user, - autopkg_repo_update_command=autopkg_repo_update_command, - ) - - results_autopkg_repo_update = await utils.run_process_async(autopkg_repo_update_command) - -##### TO DO: -###### * Add parent recipe repos update success - - if not results_autopkg_repo_update["success"]: - log.error("Failed to update parent recipe repos") - log.error("{}".format(results_autopkg_repo_update["stderr"])) - sys.exit(1) - - if args.recipe_identifier: - - recipe_id = args.recipe_identifier - - recipe_result = ( await api_helper.get_recipe_by_recipe_id(recipe_id) ).json() - - if recipe_id != recipe_result.get("recipe_id"): - log.error("Recipe `{}` was not found in the database".format(recipe_id)) - sys.exit(1) - - recipes = [ recipe_result ] - - else: - results_recipes = ( await api_helper.get_recipes() ).json() - recipes = results_recipes.get("recipes") - - for a_recipe in recipes: - - await autopkg_process_wrapper( - recipe_config=a_recipe, - action=args.action, - console_user=console_user, - pkg_name=args.pkg_name, - error_id=args.error_id - ) - - log.info("Recipe Runner: Complete!") - - -if __name__ == "__main__": - log.debug("Running recipe_runner.main") - asyncio.run( main(sys.argv) ) diff --git a/extras/PkgBot.py b/extras/PkgBot.py deleted file mode 100644 index 9666024..0000000 --- a/extras/PkgBot.py +++ /dev/null @@ -1,156 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2022 Zack Thompson -# -# Licensed under the Apache License, Version 2.0 (the 
"License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, print_function - -import os -import requests -from datetime import datetime - -from autopkglib import Processor, ProcessorError - - -__all__ = ["PkgBot"] - -class PkgBot(Processor): - description = ("Uses a Slack App (or Bot) to post to a Slack Channel" - "(or User) based on output of a JSSImporter run.") - - input_variables = { - } - output_variables = { - "pkg_data": { - "description": "Dictionary of the package details posted to the PkgBot server." - } - } - - __doc__ = description - - - def authenticate_with_pkgbot(self, server: str, username: str, password: str): - - headers = { - "accept": "application/json", - "Content-Type": "application/x-www-form-urlencoded" - } - - # Request a token based on the provided credentials - response_get_token = requests.post( "{}/auth/token".format(server), - headers=headers, - data="username={}&password={}".format(username, password) - ) - - if response_get_token.status_code == 200: - response_json = response_get_token.json() - return response_json["access_token"] - - - def main(self): - jps_url = self.env.get("JSS_URL") - prod_name = self.env.get("prod_name") - pkg_path = self.env.get("pkg_path") - recipe_id = self.env.get("recipe_id") - jss_changed_objects = self.env.get("jss_changed_objects") - jss_importer_summary_result = self.env.get("jss_importer_summary_result") - - if jss_changed_objects: - jss_uploaded_package = jss_importer_summary_result["data"]["Package"] - - if jss_uploaded_package: 
- - sw_name = jss_importer_summary_result["data"]["Name"] - sw_version = jss_importer_summary_result["data"]["Version"] - jps_icon_id = jss_importer_summary_result["data"]["Icon ID"] - jps_pkg_id = jss_importer_summary_result["data"]["Package ID"] - pkg_name = os.path.basename(pkg_path) - - pkgbot_server = "{}:{}".format(self.env.get("PKGBOT_URL"), self.env.get("PKGBOT_PORT")) - - token = self.authenticate_with_pkgbot( - pkgbot_server, - self.env.get("API_USERNAME"), - self.env.get("API_PASSWORD") - ) - - if not token: - raise ProcessorError( - 'Failed to authenticate to the PkgBot Server: {}'.format(pkgbot_server)) - - headers = { - "Authorization": "Bearer {}".format(token), - "accept": "application/json", - "Content-Type": "application/json" - } - - if self.env.get("promote"): - self.output("Promoting to Production...") - - workflow = "prod" - format_string = "%Y-%m-%d %H:%M:%S.%f" - promoted_date = datetime.strftime(datetime.now(), format_string) - pkg_data = { - "name": prod_name, - "version": sw_version, - "jps_id_prod": jps_pkg_id, - "recipe_id": recipe_id, - "promoted_date": promoted_date - } - - if jps_icon_id: - pkg_data["icon_id"] = jps_icon_id - pkg_data["jps_url"] = jps_url - - else: - self.output("Posting to dev...") - - workflow = "dev" - pkg_data = { - "name": sw_name, - "version": sw_version, - "icon_id": jps_icon_id, - "jps_id_dev": jps_pkg_id, - "jps_url": jps_url, - "pkg_name": pkg_name, - "recipe_id": recipe_id - } - - # try: - response = requests.post('{}/autopkg/workflow/{}'.format(pkgbot_server, workflow), - headers=headers, json=pkg_data) - # except: - - self.env["pkg_data"] = pkg_data - self.env["pkgbot_post_status_code"] = response.status_code - self.env["pkgbot_post_text"] = response.text - self.output("PkgBot Server POST Statuscode: {}".format( - self.env["pkgbot_post_status_code"]), verbose_level=2) - self.output("PkgBot Server POST Response: {}".format( - self.env["pkgbot_post_text"]), verbose_level=2) - self.output("pkg_data: 
{}".format( - self.env["pkg_data"]), verbose_level=2) - - if response.status_code != 200: - raise ProcessorError("ERROR: POST request to the PkgBot service returned statuscode " - "{}, with a response of:\n{}".format( - self.env["pkgbot_post_status_code"], self.env["pkgbot_post_text"])) - - else: - self.output('Package was not uploaded into Jamf Pro.') - - -if __name__ == "__main__": - processor = PkgBot() - processor.execute_shell() diff --git a/extras/ProudctionTemplate.jss.recipe b/extras/ProudctionTemplate.jss.recipe deleted file mode 100644 index aa2eef5..0000000 --- a/extras/ProudctionTemplate.jss.recipe +++ /dev/null @@ -1,71 +0,0 @@ - - - - - Description - This is a shell template recipe that is used to pull details from a "dev" run for a package and create a "production" policy for a particular title. - Identifier - com.github.mlbz521.jss.ProductionTemplate - Input - - GROUP_NAME - - GROUP_TEMPLATE - - - Process - - - Arguments - - recipe_id - %recipe_id% - custom_variables - - CUSTOM_TRIGGER - EXCLUSION - EXCLUSION2 - EXCLUSION3 - SCRIPT_NAME - SCRIPT_PARAMETER_1 - SCRIPT_PARAMETER_2 - SCRIPT_PARAMETER_3 - SCRIPT_PARAMETER_4 - SCRIPT_PARAMETER_5 - SCRIPT_PARAMETER_6 - SCRIPT_PARAMETER_7 - SCRIPT_PARAMETER_8 - SCRIPT_PRIORITY - - - Processor - JSSRecipeReceiptChecker - - - Arguments - - category - %CATEGORY% - pkg_path - %pkg_path% - prod_name - %prod_name% - policy_category - %CATEGORY% - policy_template - %POLICY_TEMPLATE% - self_service_description - %SELF_SERVICE_DESCRIPTION% - self_service_icon - %SELF_SERVICE_ICON% - version - %version% - - Comment - Self Service install policy - Processor - JSSImporter - - - - \ No newline at end of file diff --git a/extras/com.github.mlbz521.autopkg.service.plist b/extras/com.github.mlbz521.autopkg.service.plist deleted file mode 100644 index d823635..0000000 --- a/extras/com.github.mlbz521.autopkg.service.plist +++ /dev/null @@ -1,35 +0,0 @@ - - - - - Label - com.github.mlbz521.autopkg.service - ProgramArguments - 
- /usr/local/autopkg/python - -m - execute.autopkg - run - --all - --environment - dev - --pkgbot_config - /Library/AutoPkg/PkgBotServer/settings/pkgbot_config.yaml - - KeepAlive - - EnvironmentVariables - - PYTHONPATH - /Library/AutoPkg/PkgBotServer - - AbandonProcessGroup - - StartInterval - 14400 - StandardErrorPath - /Users/ahlbetsroff/Library/Logs/PkgBot.AutoPkg.Service.log - StandardOutPath - /Users/ahlbetsroff/Library/Logs/PkgBot.AutoPkg.Service.log - - diff --git a/extras/examples/Encountered and Error.png b/extras/examples/Encountered and Error.png deleted file mode 100644 index ac34b6b..0000000 Binary files a/extras/examples/Encountered and Error.png and /dev/null differ diff --git a/log/PkgBotServer-Debug.log b/log/PkgBotServer-Debug.log deleted file mode 100644 index 791822a..0000000 --- a/log/PkgBotServer-Debug.log +++ /dev/null @@ -1,4 +0,0 @@ -2022-06-13 17:55:57,586 | PkgBot | DEBUG | pkgbot.py:62 - load_config() | PkgBot.Load_Config: - All calling args: ['./pkgbot.py'] -2022-06-13 17:55:57,586 | PkgBot | DEBUG | pkgbot.py:72 - load_config() | PkgBot.Load_Config: - Argparse args: (Namespace(pkgbot_config=None), ['./pkgbot.py']) diff --git a/log/PkgBotServer.HTTP.Access.log b/log/PkgBotServer.HTTP.Access.log deleted file mode 100644 index ec7cbfa..0000000 --- a/log/PkgBotServer.HTTP.Access.log +++ /dev/null @@ -1,11 +0,0 @@ -2022-06-13 17:56:10,508 | uvicorn.access | INFO | 127.0.0.1:58599 - "GET / HTTP/1.1" 200 OK -2022-06-13 17:56:13,488 | uvicorn.access | INFO | 127.0.0.1:58599 - "GET /packages HTTP/1.1" 200 OK -2022-06-13 17:56:14,026 | uvicorn.access | INFO | 127.0.0.1:58599 - "GET /recipes HTTP/1.1" 200 OK -2022-06-13 17:56:14,676 | uvicorn.access | INFO | 127.0.0.1:58599 - "GET /packages HTTP/1.1" 200 OK -2022-06-13 17:56:14,993 | uvicorn.access | INFO | 127.0.0.1:58599 - "GET / HTTP/1.1" 200 OK -2022-06-13 17:56:17,684 | uvicorn.access | INFO | 127.0.0.1:58599 - "GET /api HTTP/1.1" 200 OK -2022-06-13 17:56:17,888 | uvicorn.access | INFO | 
127.0.0.1:58599 - "GET /openapi.json HTTP/1.1" 200 OK -2022-06-13 17:56:32,129 | uvicorn.access | INFO | :0 - "GET / HTTP/1.1" 200 OK -2022-06-13 17:56:32,609 | uvicorn.access | INFO | :0 - "GET /favicon.ico HTTP/1.1" 404 Not Found -2022-06-13 17:56:36,002 | uvicorn.access | INFO | :0 - "GET /api HTTP/1.1" 200 OK -2022-06-13 17:56:36,348 | uvicorn.access | INFO | :0 - "GET /openapi.json HTTP/1.1" 200 OK diff --git a/log/PkgBotServer.log b/log/PkgBotServer.log deleted file mode 100644 index 0500c17..0000000 --- a/log/PkgBotServer.log +++ /dev/null @@ -1,10 +0,0 @@ -2022-06-13 17:55:57,603 | uvicorn.error | INFO | Uvicorn running on http://localhost:8000 (Press CTRL+C to quit) -2022-06-13 17:55:57,603 | uvicorn.error | INFO | Started reloader process [45077] using statreload -2022-06-13 17:55:58,397 | uvicorn.error | INFO | Started server process [45079] -2022-06-13 17:55:58,397 | uvicorn.error | INFO | Waiting for application startup. -2022-06-13 17:55:58,433 | uvicorn.error | INFO | Application startup complete. -2022-06-13 17:58:05,276 | uvicorn.error | INFO | Shutting down -2022-06-13 17:58:05,378 | uvicorn.error | INFO | Waiting for application shutdown. -2022-06-13 17:58:05,385 | uvicorn.error | INFO | Application shutdown complete. 
-2022-06-13 17:58:05,385 | uvicorn.error | INFO | Finished server process [45079] -2022-06-13 17:58:05,583 | uvicorn.error | INFO | Stopping reloader process [45077] diff --git a/logs/.gitignore b/logs/.gitignore new file mode 100644 index 0000000..86d0cb2 --- /dev/null +++ b/logs/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +!.gitignore \ No newline at end of file diff --git a/pkgbot.py b/pkgbot.py deleted file mode 100755 index 09f9e7f..0000000 --- a/pkgbot.py +++ /dev/null @@ -1,147 +0,0 @@ -#!/usr/local/autopkg/python - -import argparse -import multiprocessing -import sys - -sys.path.insert(0, "/Library/AutoPkg/PkgBot") - -import asyncio -import secure -import uvicorn - -from fastapi import FastAPI -from fastapi.responses import RedirectResponse -from fastapi.middleware.httpsredirect import HTTPSRedirectMiddleware - -from tortoise.contrib.fastapi import register_tortoise - -import config, utils -from db import models -from api import auth, autopkg, package, recipe, settings, user, views -from api.slack import bot, build_msg, send_msg - - -log = utils.log - -app = FastAPI( - title="PkgBot API", - description="A framework to manage software packaging, testing, and promoting from a " - "development to production environment.", - version="0.1.0", - openapi_tags=settings.tags_metadata, - docs_url="/api" -) - -app.include_router(views.router) -app.include_router(auth.router) -app.include_router(autopkg.router) -app.include_router(package.router) -app.include_router(recipe.router) -app.include_router(bot.router) -app.include_router(build_msg.router) -app.include_router(send_msg.router) -app.include_router(user.router) - -register_tortoise( - app, - config = settings.TORTOISE_CONFIG, - generate_schemas = True, - add_exception_handlers = True -) - - -async def number_of_workers(): - number_of_threads = (multiprocessing.cpu_count() * 2) - 1 - log.debug("Number of workers: {}".format(number_of_threads)) - return 
number_of_threads - - -def load_config(cli_args=None): - - log.debug('PkgBot.Load_Config:\n\tAll calling args: {}'.format(cli_args)) - - parser = argparse.ArgumentParser(description="PkgBot Main.") - parser.add_argument( - '--pkgbot_config', '-pc', - metavar='./pkgbot.config', - default=None, type=str, required=False, - help='A config file with defined environmental configurations.') - args = parser.parse_known_args(cli_args) - - log.debug('PkgBot.Load_Config:\n\tArgparse args: {}'.format(args)) - - if len(sys.argv) != 0: - - config.load(args) - - else: - - parser.print_help() - sys.exit(0) - - -@app.on_event("startup") -async def startup_event(): - - pkgbot_admins = config.pkgbot_config.get("PkgBot.Admins") - - for admin in pkgbot_admins: - - user_object = models.PkgBotAdmin_In( - username = admin, - slack_id = pkgbot_admins.get( admin ), - full_admin = True - ) - - await user.create_or_update_user( user_object ) - - -# Add an exception handler to the app instance -# Used for the login/auth logic for the HTTP views -app.add_exception_handler(auth.NotAuthenticatedException, auth.exc_handler) -auth.login_manager.useRequest(app) - -if config.pkgbot_config.get("PkgBot.enable_ssl"): - - # Enforces that all incoming requests must be https. 
- app.add_middleware(HTTPSRedirectMiddleware) - - server = secure.Server().set("Secure") - - hsts = secure.StrictTransportSecurity().include_subdomains().preload().max_age(2592000) - - cache_value = secure.CacheControl().must_revalidate() - - secure_headers = secure.Secure( - server=server, - # csp=csp, - hsts=hsts, - # referrer=referrer, - # permissions=permissions_value, - cache=cache_value, - ) - - @app.middleware("http") - async def set_secure_headers(request, call_next): - response = await call_next(request) - secure_headers.framework.fastapi(response) - return response - - -if __name__ == "__main__": - - # Load Configuration - load_config(cli_args=sys.argv) - - uvicorn.run( - "pkgbot:app", - reload = config.pkgbot_config.get("PkgBot.keep_alive"), - host = config.pkgbot_config.get("PkgBot.host"), - port = config.pkgbot_config.get("PkgBot.port"), - log_config = config.pkgbot_config.get("PkgBot.log_config"), - log_level = config.pkgbot_config.get("PkgBot.uvicorn_log_level"), - # workers = asyncio.run( number_of_workers() ), - # ssl_keyfile = config.pkgbot_config.get("PkgBot.ssl_keyfile"), - # ssl_certfile = config.pkgbot_config.get("PkgBot.ssl_certfile") - ) diff --git a/api/slack/__init__.py b/pkgbot/__init__.py similarity index 100% rename from api/slack/__init__.py rename to pkgbot/__init__.py diff --git a/pkgbot/api/__init__.py b/pkgbot/api/__init__.py new file mode 100644 index 0000000..9a6616f --- /dev/null +++ b/pkgbot/api/__init__.py @@ -0,0 +1,10 @@ +from . import auth +from . import autopkg +from . import package +from . import recipe +from . import user +from . 
import views +from .slack import block_builders +from .slack import bot +from .slack import build_msg +from .slack import send_msg diff --git a/api/auth.py b/pkgbot/api/auth.py similarity index 65% rename from api/auth.py rename to pkgbot/api/auth.py index b394e41..c6a78e5 100644 --- a/api/auth.py +++ b/pkgbot/api/auth.py @@ -1,9 +1,7 @@ -#!/usr/local/autopkg/python - import os -from datetime import datetime, timedelta +from datetime import timedelta -import requests +import httpx from fastapi import APIRouter, Depends, HTTPException, status, Request from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm @@ -12,25 +10,25 @@ from fastapi_login import LoginManager from fastapi_login.exceptions import InvalidCredentialsException -import config, utils -from db import models -from api import settings, user +from pkgbot import config, settings +from pkgbot.api import user +from pkgbot.db import models +from pkgbot.utilities import common as utility -config.load() -log = utils.log +config = config.load_config() +log = utility.log LOGIN_SECRET = os.urandom(1024).hex() - -jps_url = config.pkgbot_config.get("JamfPro_Prod.jps_url") +jps_url = config.JamfPro_Prod.get("jps_url") oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/auth/token") login_manager = LoginManager(LOGIN_SECRET, token_url="/auth/login", use_cookie=True) login_manager.cookie_name = "PkgBot_Cookie" -templates = Jinja2Templates(directory=config.pkgbot_config.get("PkgBot.jinja_templates")) +templates = Jinja2Templates(directory=config.PkgBot.get("jinja_templates")) router = APIRouter( prefix = "/auth", tags = ["auth"], - responses = settings.custom_responses + responses = settings.api.custom_responses ) @@ -49,19 +47,16 @@ async def exc_handler(request, exc): async def authenticate_user(username: str, password: str): # Request a token based on the provided credentials - response_get_token = requests.post( "{}/api/v1/auth/token".format(jps_url), - auth=( username, password ) ) + async with 
httpx.AsyncClient() as client: + response_get_token = await client.post( + f"{jps_url}/api/v1/auth/token", auth=(username, password)) if response_get_token.status_code == 200: response_json = response_get_token.json() - sites = await user_authorizations( response_json["token"] ) - - user_model = models.PkgBotAdmin_In( - username = username, - ) - - user_exists = await user.get_user( user_model ) + sites = await user_authorizations(response_json["token"]) + user_model = models.PkgBotAdmin_In(username=username) + user_exists = await user.get_user(user_model) if len(user_exists) <= 1: @@ -69,7 +64,7 @@ async def authenticate_user(username: str, password: str): username = username, full_admin = user_exists[0].full_admin if user_exists else False, jps_token = response_json["token"], - jps_token_expires = await utils.string_to_datetime( + jps_token_expires = await utility.string_to_datetime( response_json["expires"], "%Y-%m-%dT%H:%M:%S.%fZ"), site_access = ', '.join(sites) ) @@ -81,11 +76,10 @@ async def authenticate_user(username: str, password: str): async def user_authorizations(token: str = Depends(oauth2_scheme)): - # GET all user details - response_user_details = requests.get( - "{}/api/v1/auth".format(jps_url), - headers={ "Authorization": "jamf-token {}".format(token) } - ) + # Get all user details + async with httpx.AsyncClient() as client: + response_user_details = await client.get( + f"{jps_url}/api/v1/auth", headers={ "Authorization": f"jamf-token {token}" }) # Get the response content from the API user_details = response_user_details.json() @@ -104,48 +98,41 @@ async def user_authorizations(token: str = Depends(oauth2_scheme)): if int(site["id"]) in site_ids: site_names.append(site["name"]) - except: + except Exception: pass return site_names -@login_manager.user_loader +@login_manager.user_loader() async def load_user(username: str): +##### This can be improved ^^^ + user_model = models.PkgBotAdmin_In(username=username) + # Return the user object otherwise 
None if a user was not found + return await user.get_user(user_model) or None - user_model = models.PkgBotAdmin_In( - username = username, - ) - - user_object = await user.get_user(user_model) - if user_object: - return user_object - - # User not found - return None - - -@router.post("/login", summary="Login to web views", +@router.post("/login", summary="Login to web views", description="Handles authentication on web views.") async def login(request: Request, form_data: OAuth2PasswordRequestForm = Depends()): - user = await authenticate_user( form_data.username, form_data.password ) + user = await authenticate_user(form_data.username, form_data.password) if not user: - session = { "logged_in": False } - return templates.TemplateResponse("index.html", { "request": request, "session": session }) + return templates.TemplateResponse( + "index.html", { "request": request, "session": { "logged_in": False } }) access_token = login_manager.create_access_token( - data = { "sub": form_data.username }, expires = timedelta(minutes=config.pkgbot_config.get("PkgBot.token_valid_for"))) + data = { "sub": form_data.username }, + expires = timedelta(minutes=config.PkgBot.get("token_valid_for")) + ) response = RedirectResponse(url="/", status_code=status.HTTP_302_FOUND) login_manager.set_cookie(response, access_token) - return response -@router.post("/logout", summary="Logout of web views", +@router.post("/logout", summary="Logout of web views", description="Handles logging out of web views.") async def logout(response: HTMLResponse): @@ -154,14 +141,13 @@ async def logout(response: HTMLResponse): return response -@router.post("/token", summary="Request a JWT", +@router.post("/token", summary="Request a JWT", description="Handles acquiring a JSON Web Token for use with the PkgBot API.") async def create_token(form_data: OAuth2PasswordRequestForm = Depends()): - user = await authenticate_user( form_data.username, form_data.password ) + user = await 
authenticate_user(form_data.username, form_data.password) if not user: - raise HTTPException( status_code = status.HTTP_401_UNAUTHORIZED, detail = "Invalid credentials or not a Site Admin" @@ -170,17 +156,15 @@ async def create_token(form_data: OAuth2PasswordRequestForm = Depends()): return { "access_token": user.jps_token, "token_type": "bearer" } -# @router.get("/test", summary="Return user's JWT", +# @router.get("/test", summary="Return user's JWT", # description="Test endpoint to return the current user's token.") # async def test(user: models.PkgBotAdmin_In = Depends(user.get_current_user)): # return { "token": user.jps_token } -@router.get("/authorizations", summary="Check user permissions", +@router.get("/authorizations", summary="Check user permissions", description="Returns the authenticated user's permissions (e.g. Site access).") async def authorizations(user: models.PkgBotAdmin_In = Depends(user.get_current_user)): - sites = await user_authorizations( user.jps_token ) - - return { "sites": sites } + return { "sites": await user_authorizations(user.jps_token) } diff --git a/pkgbot/api/autopkg.py b/pkgbot/api/autopkg.py new file mode 100644 index 0000000..b404f41 --- /dev/null +++ b/pkgbot/api/autopkg.py @@ -0,0 +1,411 @@ +import hashlib +import hmac +import json + +from datetime import datetime +from tempfile import SpooledTemporaryFile + +from fastapi import APIRouter, Body, Depends, Header, HTTPException, Request, UploadFile + +from fastapi_utils.tasks import repeat_every + +from celery.result import AsyncResult + +from pkgbot import api, config, settings +from pkgbot.db import models +from pkgbot.tasks import task, task_utils +from pkgbot.utilities import common as utility + + +config = config.load_config() +log = utility.log +router = APIRouter( + prefix = "/autopkg", + tags = ["autopkg"], + responses = settings.api.custom_responses +) + + +@router.get("/results/{task_id}", summary="Get the results of an autopkg task", + description="Check if a task 
has completed and it's results.", + dependencies=[Depends(api.user.get_current_user)]) +async def results(task_id: str): + + log.debug(f"Checking results for task_id: {task_id}") + task_results = task_utils.get_task_results(task_id) + + if task_results.status != "SUCCESS": + return { "current_status": task_results.status } + + elif task_results.result != None: + + if sub_task_ids := (task_results.result).get("Queued background tasks", None): + sub_tasks = [] + + for sub_task in sub_task_ids: + + if isinstance(sub_task, AsyncResult): + sub_task_result = task_utils.get_task_results(sub_task.task_id) + + if isinstance(sub_task, str): + sub_task_result = task_utils.get_task_results(sub_task) + + sub_tasks.append({sub_task_result.task_id: sub_task_result.status}) + + return { "sub_task_results": sub_tasks } + + elif isinstance(task_results.result, dict): + return { "task_results": await utility.replace_sensitive_strings(task_results.result) } + + else: + return { "task_completion_status": task_results.status } + + +@router.post("/workflow/dev", summary="Dev Workflow", + description="The Dev workflow will create a new package and post to chat.", + dependencies=[Depends(api.user.verify_admin)]) +# async def dev(pkg_object: models.Package_In = Body(..., pkg_object=Depends(models.Package_In))): +async def workflow_dev(pkg_object: models.Package_In = Body()): + """Workflow to create a new package in the database and then post a message to chat. 
+ + Args: + pkg_object (models.Package_In): Details about a package object + + Returns: + [JSON]: Result of the operation + """ + + created_pkg = await api.package.create(pkg_object) + results = await api.send_msg.new_pkg_msg(created_pkg) + pkg_db_object = await models.Packages.filter(id=created_pkg.id).first() + pkg_db_object.slack_ts = results.get("ts") + pkg_db_object.slack_channel = results.get("channel") + await pkg_db_object.save() + return { "result": "Success" } + + +@router.post("/workflow/prod", summary="Production Workflow", + description="Workflow to move a package into production and update the Slack message.", + dependencies=[Depends(api.user.verify_admin)]) +# async def prod(pkg_object: models.Package_In = Body(..., pkg_object=Depends(models.Package_In))): +async def workflow_prod(promoted_id: int, pkg_object: models.Package_In = Body()): + + if pkg_object.promoted_date is None: + date_to_convert = datetime.now() + else: + date_to_convert = pkg_object.promoted_date + + pkg_db_object = await models.Packages.filter(id=promoted_id).first() + pkg_object.promoted_date = await utility.utc_to_local(date_to_convert) + pkg_object.recipe_id = pkg_db_object.recipe_id + pkg_object.status = "prod" + updated_pkg_object = await api.package.update(promoted_id, pkg_object) + return await api.send_msg.promote_msg(updated_pkg_object) + + +@router.on_event("startup") +@repeat_every(seconds=config.Services.get("autopkg_service_start_interval"), wait_first=True) +@router.post("/run/recipes", summary="Run all recipes", + description="Runs all recipes in a background task.", + dependencies=[Depends(api.user.verify_admin)]) +async def autopkg_run_recipes( + autopkg_options: models.AutoPkgCMD = Depends(models.AutoPkgCMD), called_by: str = "schedule"): + """Run all recipes in the database. 
+ + Args: + autopkg_options (dict): A dictionary that will be used as + autopkg_options to the `autopkg` binary + + Returns: + dict: Dict describing the results of the ran process + """ + + log.info("Running all recipes") + + if not isinstance(autopkg_options, models.AutoPkgCMD): + autopkg_options = models.AutoPkgCMD() + + # callback = await determine_callback(called_by) + recipe_filter = models.Recipe_Filter(**{"enabled": True, "manual_only": False}) + recipes = (await api.recipe.get_recipes(recipe_filter)).get("recipes") + recipes = [ a_recipe.dict() for a_recipe in recipes ] + queued_task = task.autopkg_run.apply_async( + (recipes, autopkg_options.dict(), called_by), queue="autopkg", priority=3) + return { "result": "Queued background task" , "task_id": queued_task.id } + + +@router.post("/run/recipe/{recipe_id}", summary="Executes a recipes", + description="Executes a recipe in a background task.", + dependencies=[Depends(api.user.get_current_user)]) +async def autopkg_run_recipe(recipe_id: str, called_by: str = "schedule", + autopkg_options: models.AutoPkgCMD = Depends(models.AutoPkgCMD)): + """Runs the passed recipe id. 
+ + Args: + recipe (str): Recipe ID of a recipe + autopkg_options (str): + + Returns: + dict: Dict describing the results of the ran process + """ + + log.info(f"Running recipe: {recipe_id}") + + if autopkg_options.dict().get("promote"): + + pkg_object = await models.Package_Out.from_queryset_single( + models.Packages.get( + recipe_id=recipe_id, pkg_name=autopkg_options.dict().get("match_pkg")) + ) + + return await api.package.promote_package(id=pkg_object.dict().get("id")) + + a_recipe = await api.recipe.get_by_recipe_id(recipe_id) + + if a_recipe.dict().get("enabled"): + queued_task = task.autopkg_run.apply_async( + ([ a_recipe.dict() ], autopkg_options.dict(), called_by), queue="autopkg", priority=3) + + return { "result": "Queued background task" , "task_id": queued_task.id } + + log.info(f"Recipe '{recipe_id}' is disabled.") + return { "result": "Recipe is disabled" } + + +@router.post("/verify-trust/recipe/{recipe_id}", summary="Validates a recipes trust info", + description="Validates a recipes trust info in a background task.", + dependencies=[Depends(api.user.get_current_user)]) +async def autopkg_verify_recipe(recipe_id: str, called_by: str = "slack", + autopkg_options: models.AutoPkgCMD = Depends(models.AutoPkgCMD)): + """Runs the passed recipe id. 
+ + Args: + recipe (str): Recipe ID of a recipe + autopkg_options (str): + + Returns: + dict: Dict describing the results of the ran process + """ + + a_recipe = await api.recipe.get_by_recipe_id(recipe_id) + + queued_task = task.autopkg_verify_trust.apply_async( + ( + a_recipe.dict().get("recipe_id"), + autopkg_options.dict(exclude_unset=True, exclude_none=True), + called_by + ), + queue="autopkg", priority=6 + ) + + return { "result": "Queued background task" , "task_id": queued_task.id } + + +@router.post("/receive", summary="Handles incoming task messages with autopkg results", + description="This endpoint receives incoming messages from tasks and calls the required " + "actions based on the message after verifying the authenticity of the source.") +async def receive(request: Request, task_id = Body()): + + # To prevent memory allocation attacks + # if content_length > 1_000_000: + # log.error(f"Content too long ({content_length})") + # response.status_code = 400 + # return {"result": "Content too long"} + + if not await verify_pkgbot_webhook(request): + raise HTTPException(status_code=401, detail="Failed to authenticate webhook.") + + task_id = task_id.get("task_id") + log.debug(f"Receiving notification for task_id: {task_id}") + task_results = task_utils.get_task_results(task_id) + event = task_results.result.get("event") + event_id = task_results.result.get("event_id", "") + called_by = task_results.result.get("called_by") + recipe_id = task_results.result.get("recipe_id") + success = task_results.result.get("success") + stdout = task_results.result.get("stdout") + stderr = task_results.result.get("stderr") + + if event == "verify_trust_info": + callback = await determine_callback(called_by) + + if callback == "PkgBot": + + if success: + # This shouldn't ever be called? 
+ log.info(f"Trust info verified for: {recipe_id}") + + else: + # Send message that recipe_id failed verify-trust-info + redacted_error = await utility.replace_sensitive_strings(stderr) + await api.recipe.recipe_trust_verify_failed(recipe_id, redacted_error) + + elif callback == "ephemeral": +##### TO DO: + log.debug("Recipe trust info was checked via Slack command.") + # Post ephemeral msg to Slack user + + if success: + # trust info verified msg + pass + else: + # trust info invalid msg + pass + + elif event == "update_trust_info": + """ Update message with result of update-trust-info attempt """ + + if success: + await api.recipe.recipe_trust_update_success(event_id) + else: + await api.recipe.recipe_trust_update_failed(event_id, str(stderr)) + + elif event == "error": + # Post message with results + log.error(f"Failed running: {recipe_id}") + + try: + plist_contents = await utility.find_receipt_plist(stdout) + run_error = await utility.parse_recipe_receipt(plist_contents, "RecipeError") + except Exception: + run_error = stderr + + redacted_error = await utility.replace_sensitive_strings(run_error) + + if event == "recipe_run_prod": + # Promotion Failed +##### Possible ideas: + # Thread the error message with the original message? + # Post Ephemeral Message to PkgBot Admin? 
+ + # Get the recipe that failed to be promoted + pkg_db_object = await models.Packages.filter(id=event_id).first() + recipe_id = pkg_db_object.recipe_id + software_title = pkg_db_object.name + software_version = pkg_db_object.version + log.error(f"Failed to promote: {pkg_db_object.pkg_name}") + + redacted_error = { + "Failed to promote:": f"{software_title} v{software_version}", + "Error:": redacted_error + } + + await api.recipe.recipe_error(recipe_id, redacted_error, task_id) + + elif event in ("recipe_run_dev", "recipe_run_prod"): + + if not success: + log.error(f"Uncaught error in autopkg > receive; review task_id: {task_id}") + + else: + plist_contents = await utility.find_receipt_plist(stdout) + + # Get the log info for PackageUploader + pkg_processor = await utility.parse_recipe_receipt( + plist_contents, "JamfPackageUploader") + pkg_name = pkg_processor.get("Output").get("pkg_name") + pkg_data = { + "name": (pkg_name).rsplit("-", 1)[0], + "pkg_name": pkg_name, + "recipe_id": recipe_id, + "version": pkg_processor.get("Input").get("version"), + "notes": pkg_processor.get("Input").get("pkg_notes") + } + + if event == "recipe_run_dev": + + try: + # Get the log info for PolicyUploader + policy_processor = await utility.parse_recipe_receipt( + plist_contents, "JamfPolicyUploader") + policy_results = policy_processor.get( + "Output").get("jamfpolicyuploader_summary_result").get("data") + pkg_data["icon"] = policy_results.get("icon") + + # Create a temporary file to hold the icon data and upload it. + # This is required since we're not actually using an + # HTTP client to interface with the API endpoint. 
+ icon_data = SpooledTemporaryFile() + with open(policy_results.get("icon_path"), "rb") as icon_path: + icon_data.write(icon_path.read()) + _ = icon_data.seek(0) + icon = UploadFile(filename=pkg_data["icon"], file=icon_data) + await api.views.upload_icon(icon) + + except Exception: + log.info( + f"An icon was not identified, therefore it was not uploaded into PkgBot. Review task_id: {task_id}") + + # No, don't check the processor summary... + # if pkg_processor.get("Output").get("pkg_uploaded"): + + # Instead, check if the package has already been created in the database, this + # ensures a message is posted if it failed to post previously. + pkg_db_object = await models.Packages.filter(pkg_name=pkg_name).first() + + if not pkg_db_object: + log.info(f"New package posted to dev: {pkg_name}") + await workflow_dev(models.Package_In(**pkg_data)) + + # Update attributes for this recipe + recipe_object = await models.Recipes.filter(recipe_id=recipe_id).first() + recipe_object.last_ran = await utility.utc_to_local(datetime.now()) + recipe_object.recurring_fail_count = 0 + await recipe_object.save() + + elif event == "recipe_run_prod": + log.info(f"Package promoted to production: {pkg_name}") + + format_string = "%Y-%m-%d %H:%M:%S.%f" + promoted_date = datetime.strftime(datetime.now(), format_string) + pkg_data["promoted_date"] = promoted_date + + await workflow_prod(event_id, models.Package_In(**pkg_data)) + + return { "result": 200 } + + +async def determine_callback(caller: str): + + if caller == "schedule": + return "PkgBot" + + if caller == "slack": + return "ephemeral" + + if caller == "api": + return "api" + + +async def verify_pkgbot_webhook(request: Request): + + try: +##### Add a timestamp check + # slack_timestamp = request.headers.get("X-Slack-Request-Timestamp") + + # if abs(time.time() - int(slack_timestamp)) > 60 * 5: + # # The request timestamp is more than five minutes from local time. + # # It could be a replay attack, so let's ignore it. 
+ # return False + + body = json.loads(await request.body()) + + digest = await utility.compute_hex_digest( + config.PkgBot.get("webhook_secret").encode("UTF-8"), + str(body).encode("UTF-8"), + hashlib.sha512 + ) + + if hmac.compare_digest( + digest.encode("UTF-8"), + (request.headers.get("x-pkgbot-signature")).encode("UTF-8") + ): + # log.debug("Valid PkgBot Webhook message") + return True + + log.warning("Invalid PkgBot Webhook message!") + return False + + except Exception: + log.error("Exception attempting to validate PkgBot Webhook!") + return False diff --git a/pkgbot/api/package.py b/pkgbot/api/package.py new file mode 100644 index 0000000..27179df --- /dev/null +++ b/pkgbot/api/package.py @@ -0,0 +1,92 @@ +from fastapi import APIRouter, Depends, HTTPException, status + +from pkgbot import api, settings +from pkgbot.db import models +from pkgbot.utilities import common as utility +from pkgbot.tasks import task + + +log = utility.log +router = APIRouter( + prefix = "/package", + tags = ["package"], + responses = settings.api.custom_responses +) + + +@router.get("/", summary="Get all packages", description="Get all packages in the database.", + dependencies=[Depends(api.user.get_current_user)], response_model=dict) +async def get_packages(): + + packages = await models.Package_Out.from_queryset(models.Packages.all()) + return { "total": len(packages), "packages": packages } + + +@router.get("/id/{id}", summary="Get package by id", description="Get a package by its id.", + dependencies=[Depends(api.user.get_current_user)], response_model=models.Package_Out) +async def get_package_by_id(id: int): + + return await models.Package_Out.from_queryset_single(models.Packages.get(id=id)) + + +@router.post("/", summary="Create a package", description="Create a package.", + dependencies=[Depends(api.user.verify_admin)], response_model=models.Package_Out) +async def create(pkg_object: models.Package_In = Depends(models.Package_In)): + + created_pkg = await 
models.Packages.create( + **pkg_object.dict(exclude_unset=True, exclude_none=True)) + return await models.Package_Out.from_tortoise_orm(created_pkg) + + +@router.put("/id/{id}", summary="Update package by id", description="Update a package by id.", + dependencies=[Depends(api.user.verify_admin)], response_model=models.Package_Out) +async def update(id: int, pkg_object: models.Package_In = Depends(models.Package_In)): + + if type(pkg_object) != dict: + pkg_object = pkg_object.dict(exclude_unset=True, exclude_none=True) + + await models.Packages.filter(id=id).update(**pkg_object) + return await models.Package_Out.from_queryset_single(models.Packages.get(id=id)) + + +@router.delete("/id/{id}", summary="Delete package by id", description="Delete a package by id.", + dependencies=[Depends(api.user.verify_admin)]) +async def delete_package_by_id(id: int): + + delete_object = await models.Packages.filter(id=id).delete() + + if not delete_object: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Package does not exist.") + else: + return { "result": f"Successfully deleted package id: {id}" } + + +@router.post("/promote", summary="Promote package to production", + description="Promote a package to production by id.", + dependencies=[Depends(api.user.verify_admin)]) +async def promote_package(id: int): + + pkg_object = await get_package_by_id(id) + recipe = await api.recipe.get_by_recipe_id(pkg_object.recipe_id) + + autopkg_options = { + "promote": True, + "match_pkg": pkg_object.dict().get("pkg_name"), + "pkg_id": pkg_object.dict().get("id") + } + + queued_task = task.autopkg_run.apply_async( + ([recipe.dict()], autopkg_options, "slack"), queue='autopkg', priority=4) + + return { "result": "Queued background task" , "task_id": queued_task.id } + + +@router.post("/deny", summary="Do not promote package to production", + description="Performs the necessary actions when a package is not approved to production use.", + 
dependencies=[Depends(api.user.verify_admin)]) +async def deny_package(id: int = Depends(get_package_by_id)): + + pkg_object = await models.Packages.filter(id=id).first() + pkg_object.status = "Denied" + await pkg_object.save() + return await api.send_msg.deny_pkg_msg(await models.Package_Out.from_tortoise_orm(pkg_object)) diff --git a/pkgbot/api/recipe.py b/pkgbot/api/recipe.py new file mode 100644 index 0000000..c4e57a9 --- /dev/null +++ b/pkgbot/api/recipe.py @@ -0,0 +1,240 @@ +from functools import reduce + +from fastapi import APIRouter, Body, Depends, HTTPException, status + +from pkgbot import api, settings +from pkgbot.db import models +from pkgbot.utilities import common as utility +from pkgbot.tasks import task + + +log = utility.log +router = APIRouter( + prefix = "/recipe", + tags = ["recipe"], + responses = settings.api.custom_responses +) + + +@router.get("s/", summary="Get all recipes", description="Get all recipes in the database.", + dependencies=[Depends(api.user.get_current_user)], response_model=dict) +async def get_recipes(recipe_filter: models.Recipe_Filter = Depends(models.Recipe_Filter)): + + if isinstance(recipe_filter, models.Recipe_Filter): + recipes = await models.Recipe_Out.from_queryset( + models.Recipes.filter(**recipe_filter.dict(exclude_unset=True, exclude_none=True))) + else: + recipes = await models.Recipe_Out.from_queryset(models.Recipes.all()) + + return { "total": len(recipes), "recipes": recipes } + + +@router.get("/id/{id}", summary="Get recipe by id", description="Get a recipe by its id.", + dependencies=[Depends(api.user.get_current_user)], response_model=models.Recipe_Out) +async def get_by_id(id: int): + + return await models.Recipe_Out.from_queryset_single(models.Recipes.get(id=id)) + + +@router.get("/recipe_id/{recipe_id}", summary="Get recipe by recipe_id", + description="Get a recipe by its recipe_id.", + dependencies=[Depends(api.user.get_current_user)], response_model=models.Recipe_Out) +async def 
get_by_recipe_id(recipe_id: str): + + return await models.Recipe_Out.from_queryset_single(models.Recipes.get(recipe_id=recipe_id)) + + +@router.post("/", summary="Create a recipe", description="Create a recipe.", + dependencies=[Depends(api.user.verify_admin)], response_model=models.Recipe_Out) +# async def create(recipe_object: models.Recipe_In = Body(..., recipe_object=Depends(models.Recipe_In))): +async def create(recipe_object: models.Recipe_In = Body()): + + created_recipe = await models.Recipes.create( + **recipe_object.dict(exclude_unset=True, exclude_none=True)) + return await models.Recipe_Out.from_tortoise_orm(created_recipe) + + +@router.put("/id/{id}", summary="Update recipe by id", description="Update a recipe by id.", + dependencies=[Depends(api.user.verify_admin)], response_model=models.Recipe_Out) +async def update_by_id(id: int, recipe_object: models.Recipe_In = Depends(models.Recipe_In)): + + if type(recipe_object) != dict: + recipe_object = recipe_object.dict(exclude_unset=True, exclude_none=True) + + await models.Recipes.filter(id=id).update(**recipe_object) + return await models.Recipe_Out.from_queryset_single(models.Recipes.get(id=id)) + + +@router.put("/recipe_id/{recipe_id}", summary="Update recipe by recipe_id", + description="Update a recipe by recipe_id.", + dependencies=[Depends(api.user.verify_admin)], response_model=models.Recipe_Out) +async def update_by_recipe_id(recipe_id: str, + # recipe_object: models.Recipe_In = Body(..., recipe_object=Depends(models.Recipe_In))): + recipe_object: models.Recipe_In = Body()): + + if type(recipe_object) != dict: + recipe_object = recipe_object.dict(exclude_unset=True, exclude_none=True) + + await models.Recipes.filter(recipe_id=recipe_id).update(**recipe_object) + return await models.Recipe_Out.from_queryset_single(models.Recipes.get(recipe_id=recipe_id)) + + +@router.delete("/id/{id}", summary="Delete recipe by id", description="Delete a recipe by id.", + 
dependencies=[Depends(api.user.verify_admin)]) +async def delete_by_id(id: int): + + delete_object = await models.Recipes.filter(id=id).delete() + + if not delete_object: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Recipe does not exist.") + else: + return { "result": f"Successfully deleted recipe id: {id}" } + + +@router.delete("/recipe_id/{recipe_id}", summary="Delete recipe by recipe_id", + description="Delete a recipe by recipe_id.", dependencies=[Depends(api.user.verify_admin)]) +async def delete_by_recipe_id(recipe_id: str): + + delete_object = await models.Recipes.filter(recipe_id=recipe_id).delete() + + if not delete_object: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Recipe does not exist.") + else: + return { "result": f"Successfully deleted recipe id: {recipe_id}" } + + +@router.post("/error", summary="Handle recipe errors", + description="This endpoint is called when a recipe errors out during an autopkg run.", + dependencies=[Depends(api.user.verify_admin)]) +async def recipe_error(recipe_id: str, error: str, task_id: str = None): + + # Create DB entry in errors table + error_message = await models.ErrorMessages.create( recipe_id=recipe_id ) + + # Post Slack Message + try: + error_list = error.split(': ') + error_dict = reduce(lambda x, y: {y:x}, error_list[::-1]) + except Exception: + error_dict = { recipe_id: error } + + # Add task_id to error message for easier lookup + error_dict["Task ID"] = task_id + + results = await api.send_msg.recipe_error_msg(recipe_id, error_message.id, error_dict) + + updates = { + "slack_ts": results.get('ts'), + "slack_channel": results.get('channel') + } + + await models.ErrorMessages.update_or_create(updates, id=error_message.id) + + # Mark the recipe disabled + recipe_object = await models.Recipes.filter(recipe_id=recipe_id).first() + + if recipe_object: + recipe_object.enabled = False + recipe_object.recurring_fail_count = recipe_object.recurring_fail_count + 1 + 
await recipe_object.save() + + return { "result": "Success" } + + +@router.post("/trust/update", summary="Update recipe trust info", + description="Update a recipe's trust information. Runs `autopkg update-trust-info`.", + dependencies=[Depends(api.user.verify_admin)]) +async def recipe_trust_update( + trust_object: models.TrustUpdate_In, + autopkg_options: models.AutoPkgCMD = Depends(models.AutoPkgCMD) +): + + # Get recipe object + recipe_object = await models.Recipes.filter(recipe_id=trust_object.recipe_id).first() + +##### May need to create a trust_object db entry if one doesn't exist. + # Would be needed for a direct call to endpoint (API/Slack command) + # as those would not generate a failure first. + + if recipe_object: + queued_task = task.autopkg_update_trust.apply_async( + (trust_object.recipe_id, autopkg_options, trust_object.id), queue='autopkg', priority=6) + return { "result": "Queued background task" , "task_id": queued_task.id } + + else: + blocks = await api.build_msg.missing_recipe_msg(trust_object.recipe_id, "update trust for") + await api.bot.SlackBot.post_ephemeral_message( + trust_object.status_updated_by, blocks, + channel=trust_object.slack_channel, + text=f"Encountered error attempting to update trust for `{trust_object.recipe_id}`" + ) + + +@router.post("/trust/deny", summary="Do not approve trust changes", + description="This endpoint will update that database to show that the " + "changes to parent recipe(s) were not approved.", + dependencies=[Depends(api.user.verify_admin)]) +async def recipe_trust_deny(trust_object_id: int): + + trust_object = await models.TrustUpdate_Out.from_queryset_single( + models.TrustUpdates.get(id=trust_object_id)) + await api.send_msg.deny_trust_msg(trust_object) + + +@router.post("/trust/update/success", summary="Trust info was updated successfully", + description="Performs the necessary actions after trust info was successfully updated.", + dependencies=[Depends(api.user.verify_admin)]) +async def 
recipe_trust_update_success(trust_id: int): + + trust_object = await models.TrustUpdate_Out.from_queryset_single(models.TrustUpdates.get(id=trust_id)) + + # Re-enable the recipe + await update_by_recipe_id(trust_object.recipe_id, {"enabled": True}) + + if trust_object: + return await api.send_msg.update_trust_success_msg(trust_object) + + # else: +##### Post message to whomever requested the update? + # await bot.SlackBot.post_ephemeral_message( + # trust_object.status_updated_by, blocks, + # channel=trust_object.slack_channel, + # text=f"Encountered error attempting to update trust for `{trust_object.recipe_id}`" + # ) + + +@router.post("/trust/update/failed", summary="Failed to update recipe trust info", + description="Performs the necessary actions after trust info failed to update.", + dependencies=[Depends(api.user.verify_admin)]) +async def recipe_trust_update_failed(trust_id: int, msg: str): + + # Get DB entry + trust_object = await models.TrustUpdate_Out.from_queryset_single( + models.TrustUpdates.get(id=trust_id)) + + await api.send_msg.update_trust_error_msg(msg, trust_object) + + # Ensure the recipe is marked disabled + recipe_object = await models.Recipes.filter(recipe_id=trust_object.recipe_id).first() + recipe_object.enabled = False + await recipe_object.save() + return { "result": "Success" } + + +@router.post("/trust/verify/failed", summary="Parent trust info has changed", + description="Performs the necessary actions after parent recipe trust info has changed.", + dependencies=[Depends(api.user.verify_admin)]) +async def recipe_trust_verify_failed(recipe_id: str, diff_msg: str = Body()): + """ When `autopkg verify-trust-info ` fails """ + + # Create DB entry in TrustUpdates table + trust_object = await models.TrustUpdates.create(recipe_id=recipe_id) + + # Post Slack Message + await api.send_msg.trust_diff_msg(diff_msg, trust_object) + + # Mark the recipe disabled + recipe_object = await 
models.Recipes.filter(recipe_id=trust_object.recipe_id).first() + recipe_object.enabled = False + await recipe_object.save() + return { "result": "Success" } diff --git a/db/__init__.py b/pkgbot/api/slack/__init__.py similarity index 100% rename from db/__init__.py rename to pkgbot/api/slack/__init__.py diff --git a/api/slack/block_builders.py b/pkgbot/api/slack/block_builders.py similarity index 54% rename from api/slack/block_builders.py rename to pkgbot/api/slack/block_builders.py index 34a3178..df98d51 100644 --- a/api/slack/block_builders.py +++ b/pkgbot/api/slack/block_builders.py @@ -1,11 +1,16 @@ -#!/usr/local/autopkg/python - from fastapi import Depends -import utils -from db import models +from pkgbot import config +from pkgbot.utilities import common as utility +from pkgbot.db import models + + +log = utility.log +config = config.load_config() + +secure = "s" if config.PkgBot.get("enable_ssl") else "" +pkgbot_server = f"http{secure}://{config.PkgBot.get('host')}:{config.PkgBot.get('port')}" -log = utils.log async def brick_header(pkg_object: models.Package_In = Depends(models.Package_In)): @@ -24,17 +29,12 @@ async def brick_main(pkg_object: models.Package_In = Depends(models.Package_In)) "type": "section", "text": { "type": "mrkdwn", - "text": "*Name:* `{}`\n*Version:* `{}`\n*Package Name:* `{}`".format( - pkg_object.dict().get("name"), pkg_object.dict().get("version"), - pkg_object.dict().get("pkg_name", "Unknown") - ) + "text": f"*Name:* `{pkg_object.dict().get('name')}`\n*Version:* `{pkg_object.dict().get('version')}`\n*Package Name:* `{pkg_object.dict().get('pkg_name', 'Unknown')}`" }, "accessory": { "type": "image", - "image_url": "{}/iconservlet/?id={}".format(pkg_object.dict().get("jps_url"), - pkg_object.dict().get("icon_id") - ), - "alt_text": "computer thumbnail" + "image_url": f"{pkgbot_server}/static/icons/{pkg_object.dict().get('icon')}", + "alt_text": ":new:" } } @@ -46,8 +46,7 @@ async def brick_footer_dev(pkg_object: models.Package_In = 
Depends(models.Packag "elements": [ { "type": "mrkdwn", - "text": "*Dev*: {}\t*Uploaded by*: @{}".format( - pkg_object.dict().get("packaged_date"), "PkgBot") + "text": f"*Dev*: {pkg_object.dict().get('packaged_date')}\t*Uploaded by*: @{config.Slack.get('bot_name')}" } ] } @@ -57,9 +56,7 @@ async def brick_footer_promote(pkg_object: models.Package_In = Depends(models.Pa return { "type": "mrkdwn", - "text": "*Prod*: {}\t*Approved by*: @{}".format( - pkg_object.dict().get("promoted_date"), pkg_object.dict().get("status_updated_by") - ) + "text": f"*Prod*: {pkg_object.dict().get('promoted_date')}\t*Approved by*: @{pkg_object.dict().get('status_updated_by')}" } @@ -67,21 +64,18 @@ async def brick_footer_denied(pkg_object: models.Package_In = Depends(models.Pac return { "type": "mrkdwn", - "text": "*Denied by*: @{}\t*On*: @{}".format( - pkg_object.dict().get("status_updated_by"), pkg_object.dict().get("last_update")) + "text": f"*Denied by*: @{pkg_object.dict().get('status_updated_by')}\t*On*: {pkg_object.dict().get('last_update')}" } -async def brick_footer_denied_trust(error_object): +async def brick_footer_denied_trust(trust_object): return { "type": "context", "elements": [ { "type": "mrkdwn", - "text": "*Denied by*: @{}\t*On*: {}".format( - error_object.dict().get("status_updated_by"), - error_object.dict().get("last_update")) + "text": f"*Denied by*: @{trust_object.dict().get('status_updated_by')}\t*On*: {trust_object.dict().get('last_update')}" } ] } @@ -89,7 +83,7 @@ async def brick_footer_denied_trust(error_object): async def brick_button(pkg_object: models.Package_In = Depends(models.Package_In)): - return ( + return ( { "type": "section", "text": { @@ -108,7 +102,7 @@ async def brick_button(pkg_object: models.Package_In = Depends(models.Package_In "text": "Approve" }, "style": "primary", - "value": "Package:{}".format(pkg_object.dict().get("id")) + "value": f"Package:{pkg_object.dict().get('id')}" }, { "type": "button", @@ -118,7 +112,7 @@ async def 
brick_button(pkg_object: models.Package_In = Depends(models.Package_In "text": "Deny" }, "style": "danger", - "value": "Package:{}".format(pkg_object.dict().get("id")) + "value": f"Package:{pkg_object.dict().get('id')}" } ] } @@ -131,7 +125,7 @@ async def brick_error(recipe_id, error): "type": "header", "text": { "type": "plain_text", - "text": "Encountered an error in: {}".format(recipe_id), + "text": f"Encountered an error in: {recipe_id}", "emoji": True } }, @@ -139,51 +133,64 @@ async def brick_error(recipe_id, error): "type": "section", "text": { "type": "mrkdwn", - "text": "```{}```".format(error) + "text": f"```{error}```", + "verbatim": True }, "accessory": { "type": "image", - "image_url": "computer thumbnail", + "image_url": f"{pkgbot_server}/static/icons/{config.PkgBot.get('icon_error')}", "alt_text": ":x:" } + }, + { + "type": "actions", + "elements": [ + { + "type": "button", + "text": { + "type": "plain_text", + "emoji": True, + "text": "Acknowledge" + }, + "style": "danger", + "value": "Error:ack" + } + ] }] -async def brick_update_trust_success_msg(error_object): +async def brick_update_trust_success_msg(trust_object): return { "type": "section", "text": { "type": "mrkdwn", - "text": "Trust info was updated for: `{}`".format( - error_object.dict().get("recipe_id")) + "text": f"Trust info was updated for: `{trust_object.dict().get('recipe_id')}`", + "verbatim": True } } -async def brick_footer_update_trust_success_msg(error_object): +async def brick_footer_update_trust_success_msg(trust_object): return { "type": "context", "elements": [ { "type": "mrkdwn", - "text": "*Updated by*: @{}\t*On*: {}".format( - error_object.dict().get("status_updated_by"), - error_object.dict().get("last_update")) + "text": f"*Updated by*: @{trust_object.dict().get('status_updated_by')}\t*On*: {trust_object.dict().get('last_update')}" } ] } -async def brick_update_trust_error_msg(error_object, msg): +async def brick_update_trust_error_msg(trust_object, msg): return [{ 
"type": "header", "text": { "type": "plain_text", - "text": "Failed to update trust info for `{}`".format( - error_object.dict().get("recipe_id")), + "text": f"Failed to update trust info for `{trust_object.dict().get('recipe_id')}`", "emoji": True } }, @@ -191,11 +198,11 @@ async def brick_update_trust_error_msg(error_object, msg): "type": "section", "text": { "type": "mrkdwn", - "text": "```{}```".format(msg) + "text": f"```{msg}```" }, "accessory": { "type": "image", - "image_url": "computer thumbnail", + "image_url": f"{pkgbot_server}/static/icons/{config.PkgBot.get('icon_error')}", "alt_text": ":x:" } }] @@ -212,20 +219,20 @@ async def brick_deny_pkg(pkg_object: models.Package_In = Depends(models.Package_ } -async def brick_deny_trust(error_object): +async def brick_deny_trust(trust_object): return { "type": "section", "text": { "type": "mrkdwn", - "text": "Denied update to trust info for `{}`".format( - error_object.dict().get("recipe_id")) + "text": f"Denied update to trust info for `{trust_object.dict().get('recipe_id')}`", + "verbatim": True }, - # "accessory": { - # "type": "image", - # "image_url": "computer thumbnail", - # "alt_text": ":x:" - # } + "accessory": { + "type": "image", + "image_url": f"{pkgbot_server}/static/icons/{config.PkgBot.get('icon_denied')}", + "alt_text": ":denied:" + } } @@ -246,13 +253,14 @@ async def brick_trust_diff_main(recipe): "type": "section", "text": { "type": "mrkdwn", - "text": "*Recipe:* `{}`\n\n_Trust diff review required._\n\n".format(recipe) + "text": f"*Recipe:* `{recipe}`\n\n_Trust diff review required._\n\n", + "verbatim": True }, - # "accessory": { - # "type": "image", - # "image_url": "computer thumbnail", - # "alt_text": ":x:" - # } + "accessory": { + "type": "image", + "image_url": f"{pkgbot_server}/static/icons/{config.PkgBot.get('icon_warning')}", + "alt_text": ":warning:" + } } @@ -262,7 +270,8 @@ async def brick_trust_diff_content(error): "type": "section", "text": { "type": "mrkdwn", - "text": "*Diff 
Output:*```{}```".format(error) + "text": f"*Diff Output:*```{error}```", + "verbatim": True } } @@ -280,7 +289,7 @@ async def brick_trust_diff_button(id): "text": "Approve" }, "style": "primary", - "value": "Trust:{}".format(id) + "value": f"Trust:{id}" }, { "type": "button", @@ -290,7 +299,7 @@ async def brick_trust_diff_button(id): "text": "Deny" }, "style": "danger", - "value": "Trust:{}".format(id) + "value": f"Trust:{id}" } ] } @@ -311,13 +320,13 @@ async def unauthorized(user): "text": { "type": "mrkdwn", "text": "_*Warning:*_ you are not a PkgBot admin and are not authorized to " - "perform this action.\n\n`{}` will be reported to the " - "robot overloads.".format(user) + f"perform this action.\n\n`{user}` will be reported to the " + "robot overloads." }, "accessory": { "type": "image", - "image_url": "https://as1.ftcdn.net/jpg/01/81/82/24/500_F_181822453_iQYjSxsW1AXa8FHOA6ecgdZEmrBdfInD.jpg", - "alt_text": ":x:" + "image_url": f"{pkgbot_server}/static/icons/{config.PkgBot.get('icon_permission_denied')}", + "alt_text": ":denied:" } } ] @@ -330,11 +339,12 @@ async def missing_recipe_msg(recipe_id, text): "type": "section", "text": { "type": "mrkdwn", - "text": "Failed to {} `{}`".format(text, recipe_id), + "text": f"Failed to {text} `{recipe_id}`", + "verbatim": True }, "accessory": { "type": "image", - "image_url": "error", + "image_url": f"{pkgbot_server}/static/icons/{config.PkgBot.get('icon_error')}", "alt_text": ":x:" } } diff --git a/pkgbot/api/slack/bot.py b/pkgbot/api/slack/bot.py new file mode 100644 index 0000000..8659600 --- /dev/null +++ b/pkgbot/api/slack/bot.py @@ -0,0 +1,406 @@ +import hmac +import json +import ssl +import time +import certifi + +from fastapi import APIRouter, Depends, Request + +from slack_sdk.errors import SlackApiError +from slack_sdk.web.async_client import AsyncWebClient +from slack_sdk.webhook.async_client import AsyncWebhookClient + +from pkgbot import api, config, settings +from pkgbot.db import models +from 
pkgbot.utilities import common as utility + + +config = config.load_config() +log = utility.log + +SlackBot = None +ssl_context = ssl.create_default_context(cafile=certifi.where()) +router = APIRouter( + prefix = "/slackbot", + tags = ["slackbot"], + responses = settings.api.custom_responses +) + + +class SlackClient(object): + + def __init__(self, **kwargs): + self.token = kwargs["token"] + self.bot_name = kwargs["bot_name"] + self.channel = kwargs["channel"] + self.slack_id = kwargs["slack_id"] + + self.client = AsyncWebClient(token=self.token, ssl=ssl_context) + + + async def post_message(self, blocks: str, text: str = "Pkg status incoming..."): + + try: + return await self.client.chat_postMessage( + channel = self.channel, + text = text, + blocks = blocks, + username = self.bot_name + ) + + except SlackApiError as error: + log.error(f"Failed to post message: {error.response['error']}\n{error}") + return { "Failed to post message": error.response["error"] } + + + async def update_message(self, blocks: str, ts: str, text: str = "Updated message..."): + + try: + return await self.client.chat_update( + channel = self.channel, + text = text, + blocks = blocks, + ts = ts + ) + + except SlackApiError as error: + log.error(f"Failed to update {ts}: {error.response['error']}\n{error}") + return { f"Failed to update {ts}": error.response["error"] } + + + async def delete_message(self, ts: str): + + try: + await self.client.chat_delete(channel=self.channel, ts=ts) + return { "result": "Successfully deleted message" } + + except SlackApiError as error: + log.error(f"Failed to delete {ts}: {error.response['error']}\n{error}") + return { f"Failed to delete {ts}": error.response["error"] } + + + async def update_message_with_response_url( + self, response_url: str, blocks: str, text: str = "Pkg status update..."): + + try: + webhook = AsyncWebhookClient(url=response_url, ssl=ssl_context) + response = await webhook.send( + text = text, + blocks = blocks, + replace_original = 
True + ) + + if response.status_code != 200: + log.error( + f"Failed to update message! Status code: {response.status_code} | Error message: {response.body}") + else: + log.debug("Successfully updated msg via response_url") + + return response + + except SlackApiError as error: + log.error( + f"Failed to update {response_url}\nFull Error:\n{error}\nerror.dir: {dir(error)}\nerror.response['error']: {error.response['error']}") + return { f"Failed to update {response_url}": error.response["error"] } + + + async def post_ephemeral_message( + self, user: str, blocks: str, channel: str = None, text: str = "Private Note"): + + try: + return await self.client.chat_postEphemeral( + channel = channel or self.channel, + user = user, + text = text, + blocks = blocks, + username = self.bot_name + ) + + except SlackApiError as error: + log.error( + f"Failed to post ephemeral message: {error.response['error']}\nFull Error:\n{error}") + return { "Failed to post ephemeral message": error.response["error"] } + + + async def file_upload(self, content=None, file=None, filename=None, filetype=None, + title=None, text=None, thread_ts=None): + + try: + return await self.client.files_upload( + channels = self.channel, + content = content, + file = file, + filename = filename, + filetype = filetype, + title = title, + initial_comment = text, + thread_ts = thread_ts, + username = self.bot_name + ) + + except SlackApiError as error: + log.error(f"Failed to upload {file}: {error.response['error']}\nFull Error:\n{error}") + return { f"Failed to upload {file}": error.response["error"] } + + + async def invoke_reaction(self, **kwargs): + + kwargs |= { + "channel": kwargs.get("channel", self.channel), + "timestamp": str(kwargs.get("ts")) + } + + if "ts" in kwargs: + del kwargs["ts"] + + try: + return await self.client.api_call( + f"reactions.{kwargs.get('action')}", + params = kwargs + ) + + except SlackApiError as error: + error_key = error.response["error"] + + if not ( + kwargs.get("action") 
== "add" and error_key == "already_reacted" or + kwargs.get("action") == "remove" and error_key == "no_reaction" + ): + result = { f"Failed to invoke reaction on {kwargs.get('timestamp')}": error_key } + log.error(result) + return result + + else: + log.debug("Unable to perform the specified reaction action") + + + async def reaction(self, action: str = None, emoji: str = None, ts: str = None, **kwargs): + + # log.debug("Args:\n\taction: {}\n\temoji: {}\n\tts: {}\n\tkwargs: {}".format( + # action, emoji, ts, kwargs)) + + # log.debug("Checking current reactions") + + # Force checking if this works or not..... + # It's not.... + # response = await self.client.api_call( + # "reactions.get", + # http_verb = "GET", + # params = { + # 'channel': 'C0266ANUEJZ', + # 'timestamp': '1646121180.754269' + # } + # ) + +##### This is currently not working.... + # # Check if reaction exists or not... + # response = await self.invoke_reaction(action="get", ts=ts, http_verb="GET") + # # log.debug("forced get response:\n{}".format(response)) + # reactions = response.get("message").get("reactions") + + # for reaction in reactions: + # if ( + # reaction.get("name") == kwargs.get("emoji") and + # elf.slack_id in reaction.get("users") + # ): + # log.debug("Reaction already exists") + # exists = True + # break + + # log.debug("Reaction doesn't exist") + # exists = False + + # if ( + # action == "add" and exists == False or + # action == "remove" and exists == True + # ): + + return await self.invoke_reaction(action=action, name=emoji, ts=ts, **kwargs) + + +async def validate_slack_request(request: Request): + + try: + slack_timestamp = request.headers.get("X-Slack-Request-Timestamp") + + if abs(time.time() - int(slack_timestamp)) > 60 * 5: + # The request timestamp is more than five minutes from local time. + # It could be a replay attack, so let's ignore it. 
+ return False + + slack_body = (await request.body()).decode("UTF-8") + signature_basestring = (f"v0:{slack_timestamp}:{slack_body}").encode() + + computed_signature = "v0=" + await utility.compute_hex_digest( + bytes(config.Slack.get("signing_secret"), "UTF-8"), + signature_basestring) + + slack_signature = request.headers.get("X-Slack-Signature") + + if hmac.compare_digest(computed_signature, slack_signature): + log.debug("Valid Slack message") + return True + + else: + log.warning("Invalid Slack message!") + return False + + except Exception: + + return False + + +@router.on_event("startup") +async def startup_constructor(): + + global SlackBot + + SlackBot = SlackClient( + token = config.Slack.get("bot_token"), + bot_name = config.Slack.get("bot_name"), + channel = config.Slack.get("channel"), + slack_id = config.Slack.get("slack_id") + ) + + +@router.delete("/ts/{ts}", summary="Delete Slack message by timestamp", + description="Delete a Slack message by its timestamp.", + dependencies=[Depends(api.user.verify_admin)]) +async def delete_slack_message(timestamps: str | list): + + if isinstance(timestamps, str): + timestamps = [timestamps] + + results = {} + + for ts in timestamps: + result = await SlackBot.delete_message(str(ts)) + + try: + results[ts] = result.response['error'] + except Exception: + results[ts] = "Successfully deleted message" + + return results + + +@router.post("/receive", summary="Handles incoming messages from Slack", + description="This endpoint receives incoming messages from Slack and calls the required " + "actions based on the message after verifying the authenticity of the source.") +async def receive(request: Request): + + if await validate_slack_request(request): + + form_data = await request.form() + payload = form_data.get("payload") + payload_object = json.loads(payload) + + user_id = payload_object.get("user").get("id") + username = payload_object.get("user").get("username") + channel = payload_object.get("channel").get("id") + 
message_ts = payload_object.get("message").get("ts") + response_url = payload_object.get("response_url") + + button_text = payload_object.get("actions")[0].get("text").get("text") + button_value_type, button_value = ( + payload_object.get("actions")[0].get("value")).split(":") + + # log.debug("Incoming details:\n" + # f"user id: {user_id}\nusername: {username}\nchannel: {channel}\nmessage_ts: " + # f"{message_ts}\nresponse_url: {response_url}\nbutton_text: {button_text}\n" + # f"button_value_type: {button_value_type}\nbutton_value: {button_value}\n" + # ) + + slack_user_object = models.PkgBotAdmin_In( + username = username, + slack_id = user_id + ) + + user_that_clicked = await api.user.get_user(slack_user_object) + +##### Disabled for testing +##### Actually don't think this is needed........ + # try: + # if user_that_clicked.full_admin: + # full_admin = True + + # except: + # full_admin = False + + # Verify and perform action only if a PkgBotAdmin clicked the button + if user_that_clicked: # and full_admin: + + await SlackBot.reaction( + action = "add", + emoji = "gear", + ts = message_ts + ) + + if button_text == "Approve": + + if button_value_type == "Package": + log.info(f"PkgBotAdmin `{username}` is promoting package id: {button_value}") + + await api.package.update(button_value, + { "response_url": response_url, "status_updated_by": username }) + await api.package.promote_package(button_value) + + elif button_value_type == "Trust": + log.info( + f"PkgBotAdmin `{username}` has approved updates for trust id: {button_value}") + + updates = { + "response_url": response_url, + "status_updated_by": username, + "slack_ts": message_ts + } + + trust_object = await models.TrustUpdates.filter(id=button_value).first() + await models.TrustUpdates.update_or_create(updates, id=trust_object.id) + await api.recipe.recipe_trust_update(trust_object) + + elif button_text == "Deny": + + if button_value_type == "Package": + log.info(f"PkgBotAdmin `{username}` has denied package 
id: {button_value}") + + await api.package.update(button_value, + { "response_url": response_url, + "status_updated_by": username, + "status": "Denied", + "notes": "This package was not approved for use in production." } + ) + await api.package.deny_package(button_value) + + if button_value_type == "Trust": + log.info( + f"PkgBotAdmin `{username}` has denied updates for trust id: {button_value}") + + updates = { + "response_url": response_url, + "status_updated_by": username, + "status": "Denied" + } + + trust_object = await models.TrustUpdates.filter(id=button_value).first() + await models.TrustUpdates.update_or_create(updates, id=trust_object.id) + await api.recipe.recipe_trust_deny(button_value) + + elif button_text == "Acknowledge": + + if button_value_type == "Error": + log.info(f"PkgBotAdmin `{username}` has acknowledged error id: {button_value}") + return await SlackBot.delete_message(str(message_ts)) + + else: + log.warning(f"Unauthorized user: `{username}` [{user_id}].") + + blocks = await api.build_msg.unauthorized_msg(username) + await SlackBot.post_ephemeral_message( + user_id, blocks, channel=channel, text="WARNING: Unauthorized access attempted") + + return { "result": 200 } + + else: + log.warning("PkgBot received an invalid request!") + return { "result": 500 } diff --git a/pkgbot/api/slack/build_msg.py b/pkgbot/api/slack/build_msg.py new file mode 100644 index 0000000..ae79c53 --- /dev/null +++ b/pkgbot/api/slack/build_msg.py @@ -0,0 +1,152 @@ +import json + +from fastapi import APIRouter, Depends + +from pkgbot import api, settings +from pkgbot.db import models +from pkgbot.utilities import common as utility + + +log = utility.log +SlackBot = None +router = APIRouter( + prefix = "/slackbot/build", + tags = ["slackbot"], + dependencies = [Depends(api.user.verify_admin)], + responses = settings.api.custom_responses +) + +async def format_json(the_json, indent=4): + return json.dumps(the_json, indent=indent) + + +@router.get("/new-pkg-msg", 
summary="Build new package message", + description="Builds a 'new package' message for Slack after " + "a .pkg has been added to the dev environment.") +async def new_pkg_msg(pkg_object: models.Package_In = Depends(models.Package_In)): + + blocks = [ + await api.block_builders.brick_header(pkg_object), + await api.block_builders.brick_main(pkg_object), + await api.block_builders.brick_footer_dev(pkg_object) + ] + + for brick in await api.block_builders.brick_button(pkg_object): + blocks.append(brick) + + return await format_json(blocks) + + +@router.get("/recipe-error", summary="Build error message", + description="Builds an 'error' message for Slack after a recipe has returned an error.") +async def recipe_error_msg(recipe_id: str, id: int, error: dict): + + redacted_error = await utility.replace_sensitive_strings(error) + blocks = await api.block_builders.brick_error(recipe_id, await format_json(redacted_error)) + return await format_json(blocks) + + +@router.get("/trust-diff-msg", summary="Build trust diff message", + description="Builds a message with the trust diff contents " + "for Slack after a recipe's parent trust info has changed.") +async def trust_diff_msg(id: int, recipe: str, diff_msg: str = None): + + blocks = [ + await api.block_builders.brick_trust_diff_header(), + await api.block_builders.brick_trust_diff_main(recipe) + ] + + if diff_msg: + blocks.append(await api.block_builders.brick_trust_diff_content(diff_msg)) + + blocks.append(await api.block_builders.brick_trust_diff_button(id)) + return await format_json(blocks) + + +@router.get("/deny-pkg-msg", summary="Build deny package message", + description="Builds a 'package denied message' for Slack when " + "a .pkg is not approved for the production environment.") +async def deny_pkg_msg(pkg_object: models.Package_In = Depends(models.Package_In)): + + brick_footer = await api.block_builders.brick_footer_dev(pkg_object) + brick_footer.get("elements").append( + await api.block_builders.brick_footer_denied(pkg_object) 
+ ) + + blocks = [ + await api.block_builders.brick_deny_pkg(pkg_object), + await api.block_builders.brick_main(pkg_object), + brick_footer + ] + + return await format_json(blocks) + + +@router.get("/deny-trust-msg", summary="Build deny trust message", + description="Builds a message for Slack stating a recipe's " + "parent trust info changes were not approved.") +async def deny_trust_msg( + trust_object: models.TrustUpdate_In = Depends(models.TrustUpdate_In)): + + blocks = [ + await api.block_builders.brick_deny_trust(trust_object), + await api.block_builders.brick_footer_denied_trust(trust_object) + ] + + return await format_json(blocks) + + +@router.get("/promote-msg", summary="Build promoted package message", + description="Builds a 'package has been promoted' message for Slack " + "after a .pkg has been approved for the production environment.") +async def promote_msg(pkg_object: models.Package_In = Depends(models.Package_In)): + + brick_footer = await api.block_builders.brick_footer_dev(pkg_object) + brick_footer.get("elements").append( + await api.block_builders.brick_footer_promote(pkg_object) + ) + + blocks = [ + await api.block_builders.brick_main(pkg_object), + brick_footer + ] + + return await format_json(blocks) + + +@router.get("/update-trust-success-msg", summary="Build trust update success message", + description="Builds a 'success' message for Slack when a " + "recipe's trust info is updated successfully.") +async def update_trust_success_msg( + trust_object: models.TrustUpdate_In = Depends(models.TrustUpdate_In)): + + blocks = [ + await api.block_builders.brick_update_trust_success_msg(trust_object), + await api.block_builders.brick_footer_update_trust_success_msg(trust_object) + ] + + return await format_json(blocks) + + +@router.get("/update-trust-error-msg", summary="Build trust update error message", + description="Builds an 'error' message for Slack when a recipe's trust info fails to update.") +async def update_trust_error_msg(msg: str, + trust_object: 
models.TrustUpdate_In = Depends(models.TrustUpdate_In)): + + return await format_json([await api.block_builders.brick_update_trust_error_msg(trust_object, msg)]) + + +@router.get("/unauthorized-msg", summary="Build unauthorized message", + description="Builds an 'unauthorized' message for Slack when a user attempts to " + "perform a Slack interaction with PkgBot that they're not authorized to perform.") +async def unauthorized_msg(user): + + return await format_json(await api.block_builders.unauthorized(user)) + + +@router.get("/missing-recipe-msg", summary="Build missing recipe message", + description="Builds a 'missing recipe' message for Slack when unable to locate " + "a recipe for a requested action.") +async def missing_recipe_msg(recipe_id, text): + + return await format_json(await api.block_builders.missing_recipe_msg(recipe_id, text)) diff --git a/pkgbot/api/slack/send_msg.py b/pkgbot/api/slack/send_msg.py new file mode 100644 index 0000000..ef9a3d8 --- /dev/null +++ b/pkgbot/api/slack/send_msg.py @@ -0,0 +1,185 @@ +import json + +from fastapi import APIRouter, Depends + +from pkgbot import api, settings +from pkgbot.db import models +from pkgbot.utilities import common as utility + + +log = utility.log +SlackBot = None +router = APIRouter( + prefix = "/slackbot/send", + tags = ["slackbot"], + dependencies = [Depends(api.user.verify_admin)], + responses = settings.api.custom_responses +) + +max_content_size = 1500 + + +@router.post("/dev-msg", summary="Send new package message", + description="Sends a 'new package' message to Slack after " + "a .pkg has been added to the dev environment.") +async def new_pkg_msg(pkg_object: models.Package_In = Depends(models.Package_In)): + + return await api.bot.SlackBot.post_message( + await api.build_msg.new_pkg_msg(pkg_object), + text=f"Update for {pkg_object.name}" + ) + + +@router.post("/promote-msg", summary="Send promoted package message", + description="Sends a 'package has been promoted' message to Slack " + "after a .pkg has been 
approved for the production environment.") +async def promote_msg(pkg_object: models.Package_In = Depends(models.Package_In)): + + blocks = await api.build_msg.promote_msg(pkg_object) + text = f"{pkg_object.pkg_name} was promoted to production" + + result = await api.bot.SlackBot.update_message_with_response_url( + pkg_object.response_url, + blocks, + text=text + ) + + # If the first method fails, try the alternate + if json.loads(result.body).get("error") == "expired_url": + await api.bot.SlackBot.update_message( + blocks, + pkg_object.slack_ts, + text=text + ) + + return await api.bot.SlackBot.reaction( + action = "remove", + emoji = "gear", + ts = pkg_object.slack_ts + ) + + +@router.post("/recipe-error-msg", summary="Send error message", + description="Sends an 'error' message to Slack after a recipe has returned an error.") +async def recipe_error_msg(recipe_id: str, id: int, error: str): + + blocks = await api.build_msg.recipe_error_msg(recipe_id, id, error) + return await api.bot.SlackBot.post_message(blocks, text=f"Encountered error in {recipe_id}") + + +@router.post("/trust-diff-msg", summary="Send trust diff message", + description="Sends a message with the trust diff contents to " + "Slack after a recipe's parent trust info has changed.") +async def trust_diff_msg( + diff_msg: str, trust_object: models.TrustUpdate_In = Depends(models.TrustUpdate_In)): + + if len(diff_msg) > max_content_size: + blocks = await api.build_msg.trust_diff_msg(trust_object.id, trust_object.recipe_id) + else: + blocks = await api.build_msg.trust_diff_msg( + trust_object.id, trust_object.recipe_id, diff_msg) + + response = await api.bot.SlackBot.post_message( + blocks, + text=f"Trust verification failed for `{trust_object.recipe_id}`" + ) + + trust_object.slack_ts = response.get('ts') + await trust_object.save() + + if len(diff_msg) > max_content_size: + response = await api.bot.SlackBot.file_upload( + content = diff_msg, + filename = f"{trust_object.recipe_id}.diff", + filetype 
= "diff", + title = trust_object.recipe_id, + text = f"Diff Output for {trust_object.recipe_id}", + thread_ts = trust_object.slack_ts + ) + + return response + + +@router.put("/update-trust-success-msg", summary="Send trust update success message", + description="Sends a 'success' message to Slack when " + "a recipe's trust info is updated successfully.") +async def update_trust_success_msg( + trust_object: models.TrustUpdate_In = Depends(models.TrustUpdate_In)): + + blocks = await api.build_msg.update_trust_success_msg(trust_object) + + response = await api.bot.SlackBot.update_message_with_response_url( + trust_object.dict().get("response_url"), + blocks, + text=f"Successfully updated trust info for {trust_object.recipe_id}" + ) + + if response.status_code == 200: + await api.bot.SlackBot.reaction( + action = "remove", + emoji = "gear", + ts = trust_object.slack_ts + ) + + return response + + +@router.put("/update-trust-error-msg", summary="Send trust update error message", + description="Sends an 'error' message to Slack when a recipe's trust info fails to update.") +async def update_trust_error_msg(msg: str, + trust_object: models.TrustUpdate_In = Depends(models.TrustUpdate_In)): + + blocks = await api.build_msg.update_trust_error_msg(msg, trust_object) + + return await api.bot.SlackBot.update_message_with_response_url( + trust_object.dict().get("response_url"), + blocks, + text=f"Failed to update trust info for {trust_object.recipe_id}" + ) + + +@router.put("/deny-pkg-msg", summary="Send deny package message", + description="Sends a 'package denied message' to Slack when " + "a .pkg is not approved for the production environment.") +async def deny_pkg_msg(pkg_object: models.Package_In = Depends(models.Package_In)): + + blocks = await api.build_msg.deny_pkg_msg(pkg_object) + + response = await api.bot.SlackBot.update_message_with_response_url( + pkg_object.dict().get("response_url"), + blocks, + text=f"{pkg_object.pkg_name} was not approved for production" + ) + 
description="Send a message to Slack stating a recipe's "
HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="You are not authorized to utilize this endpoint.") # log.debug("User is an admin") -@router.get("s/", summary="Get all users", description="Get all users in the database.", +@router.get("s/", summary="Get all users", description="Get all users in the database.", dependencies=[Depends(verify_admin)]) async def get_users(): @@ -64,30 +62,30 @@ async def get_users(): return { "total": len(users), "users": users } -@router.post("/create", summary="Create a user", description="Creates a new PkgBot user.", - dependencies=[Depends(verify_admin)], response_model=models.PkgBotAdmin_Out, +@router.post("/create", summary="Create a user", description="Creates a new PkgBot user.", + dependencies=[Depends(verify_admin)], response_model=models.PkgBotAdmin_Out, response_model_exclude={ "slack_id", "jps_token" }, response_model_exclude_unset=True) -async def create_user(response: Response, +async def create_user(response: Response, user: models.PkgBotAdmin_In = Depends(models.PkgBotAdmin_In)): if await get_user(user): - raise HTTPException(status_code=status.HTTP_409_CONFLICT, - detail="The user `{}` already exists.".format(user.username)) + raise HTTPException(status_code=status.HTTP_409_CONFLICT, + detail=f"The user `{user.username}` already exists.") return await create_or_update_user(user) -@router.put("/update", summary="Update a user", description="Updates an existing PkgBot user.", - dependencies=[Depends(verify_admin)], response_model=models.PkgBotAdmin_Out, +@router.put("/update", summary="Update a user", description="Updates an existing PkgBot user.", + dependencies=[Depends(verify_admin)], response_model=models.PkgBotAdmin_Out, response_model_exclude={ "slack_id", "jps_token" }, response_model_exclude_unset=True) -async def update_user(response: Response, +async def update_user(response: Response, user: models.PkgBotAdmin_In = Depends(models.PkgBotAdmin_In)): return await create_or_update_user(user) 
-@router.get("/whoami", summary="Get user's info", - description="Get the currently authenticated users information.", +@router.get("/whoami", summary="Get user's info", + description="Get the currently authenticated users information.", dependencies=[Depends(get_current_user)], response_model=models.PkgBotAdmin_Out) async def whoami(user: models.PkgBotAdmin_In = Depends(get_current_user)): diff --git a/pkgbot/api/views.py b/pkgbot/api/views.py new file mode 100644 index 0000000..a786b19 --- /dev/null +++ b/pkgbot/api/views.py @@ -0,0 +1,122 @@ +import os +import shutil + +from datetime import datetime + +from fastapi import APIRouter, Depends, Request, UploadFile +from fastapi.responses import HTMLResponse +from fastapi.templating import Jinja2Templates + +from pkgbot import config +from pkgbot.utilities import common as utility +from pkgbot.api import auth, package, recipe + + +log = utility.log +config = config.load_config() + + +def template_filter_datetime(date, date_format="%Y-%m-%d %I:%M:%S"): + + if date: + converted = datetime.fromisoformat(str(date)) + return converted.strftime(date_format) + + +session = { "logged_in": False } +templates = Jinja2Templates(directory=config.PkgBot.get("jinja_templates")) +templates.env.filters["strftime"] = template_filter_datetime +router = APIRouter( + tags = ["view"], + include_in_schema = False +) + + +@router.get("/", response_class=HTMLResponse) +async def index(request: Request): + + if request.state.user: + session["logged_in"] = True + else: + session["logged_in"] = False + + return templates.TemplateResponse("index.html", { "request": request, "session": session }) + + +# @router.get("/login", response_class=HTMLResponse) +# async def userlogin(request: Request): + +# return templates.TemplateResponse("login.html", { "request": request, "session": session }) + + +@router.get("/packages", response_class=HTMLResponse) +async def package_history(request: Request, user = Depends(auth.login_manager)): + + 
session["logged_in"] = True + pkgs = await package.get_packages() + + table_headers = [ + "", "", "Name", "Version", "Status", "Updated By", + "Packaged", "Promoted", "COMMON", "Flags", "Notes" + ] + + return templates.TemplateResponse("packages.html", + { "request": request, "session": session, + "table_headers": table_headers, "packages": pkgs.get("packages") }) + + +@router.get("/package/{id}", response_class=HTMLResponse) +async def get_package(request: Request, user = Depends(auth.login_manager)): + + session["logged_in"] = True + pkg = await package.get_package_by_id(request.path_params['id']) + + return templates.TemplateResponse("package.html", + { "request": request, "session": session, "package": pkg }) + + +@router.get("/edit/{id}", response_class=HTMLResponse) +async def edit(request: Request, user = Depends(auth.login_manager)): + + pkg = await package.get_package_by_id(request.path_params['id']) + + return templates.TemplateResponse("edit.html", + { "request": request, "session": session, "package": pkg }) + + +@router.get("/recipes", response_class=HTMLResponse) +async def recipe_list(request: Request, user = Depends(auth.login_manager)): + + session["logged_in"] = True + pkgs = await recipe.get_recipes() + + table_headers = [ "ID", "Recipe ID", "Name", "Enable", "Manual Only", + "Pkg Only", "Last Ran", "Schedule", "Notes" ] + + return templates.TemplateResponse("recipes.html", + { "request": request, "session": session, + "table_headers": table_headers, "recipes": pkgs.get("recipes") }) + + +@router.get("/recipe/{id}", response_class=HTMLResponse) +async def get_recipe(request: Request, user = Depends(auth.login_manager)): + + session["logged_in"] = True + pkg = await recipe.get_by_id(request.path_params['id']) + + return templates.TemplateResponse("recipe.html", + { "request": request, "session": session, "recipe": pkg }) + + +@router.post("/icons") +async def upload_icon(icon: UploadFile, user = Depends(auth.login_manager)): + + pkg_dir = 
os.path.abspath(os.path.join(os.path.abspath(os.path.dirname(__file__)), os.pardir)) + + try: + with open(f"{pkg_dir}/static/icons/{icon.filename}", "wb") as icon_obj: + shutil.copyfileobj(icon.file, icon_obj) + finally: + await icon.close() + + return { "results": 200, "icon": icon.filename } diff --git a/pkgbot/config.py b/pkgbot/config.py new file mode 100644 index 0000000..113a544 --- /dev/null +++ b/pkgbot/config.py @@ -0,0 +1,77 @@ +import argparse +import os +import yaml + +from functools import lru_cache + +from pydantic import BaseSettings +from pydantic.env_settings import SettingsSourceCallable + + +def yml_config_setting(settings: BaseSettings): + + with open(settings.__config__.config_file) as file: + return yaml.safe_load(file) + + +def instantiate_config(): + + class PkgBot_Configuration(BaseSettings): + + class Config: + + arbitrary_types_allowed = True + config_file = os.environ.get("PKGBOT_CONFIG") + env_prefix = "PKGBOT_" + # allow extra options so we can detect legacy configuration files + extra = "allow" + + @classmethod + def customise_sources( + cls, + init_settings: SettingsSourceCallable, + env_settings: SettingsSourceCallable, + file_secret_settings: SettingsSourceCallable, + ): + # Add load from yml file, change priority and remove file secret option + return init_settings, yml_config_setting, env_settings + + return PkgBot_Configuration + + +@lru_cache() +def load_config(cli_args=None): + + # print(f'PkgBot.Load_Config:\n\tAll calling args: {cli_args}') + parser = argparse.ArgumentParser(description="PkgBot Main.") + parser.add_argument( + '--pkgbot_config', '-pc', metavar='./pkgbot.config', default=None, + type=str, required=False, help='A defined pkgbot configuration file.' 
raise FileNotFoundError("The specified config file does not exist.")
| TO BE REMOVED packaged_date = fields.DatetimeField(auto_now_add=True) promoted_date = fields.DatetimeField(null=True, default=None) last_update = fields.DatetimeField(auto_now=True) @@ -36,13 +36,26 @@ class Recipes(Model): recipe_id = fields.CharField(512, unique=True) name = fields.CharField(64) enabled = fields.BooleanField(default=True) + manual_only = fields.BooleanField(default=False) pkg_only = fields.BooleanField(default=False) last_ran = fields.DatetimeField(null=True, default=None) + recurring_fail_count = fields.IntField(null=True, default=0) schedule = fields.IntField(default=0) notes = fields.CharField(4096, null=True) Recipe_Out = pydantic_model_creator(Recipes, name="Recipe_Out") Recipe_In = pydantic_model_creator(Recipes, name="Recipe_In", exclude_readonly=True) +# Recipe_Filter = pydantic_model_creator( +# Recipes, name="Recipe_Filter", exclude_readonly=True, +# exclude=('id', "recipe_id", "name", "last_ran", "notes"), +# optional=( "enabled", "manual_only", "pkg_only", "recurring_fail_count", "schedule")) + +class Recipe_Filter(BaseModel): + enabled: bool | None = None + manual_only: bool | None = None + pkg_only: bool | None = None + recurring_fail_count: int | None = None + schedule: int | None = None class PkgBotAdmins(Model): @@ -72,3 +85,37 @@ class ErrorMessages(Model): ErrorMessage_Out = pydantic_model_creator(ErrorMessages, name="ErrorMessage_Out") ErrorMessage_In = pydantic_model_creator( ErrorMessages, name="ErrorMessage_In", exclude_readonly=True) + + +class TrustUpdates(Model): + id = fields.IntField(pk=True) + recipe_id = fields.CharField(1024) + slack_ts = fields.CharField(32, null=True) + slack_channel = fields.CharField(32, null=True) + response_url = fields.CharField(1024, null=True) + status_updated_by = fields.CharField(64, default="PkgBot") + last_update = fields.DatetimeField(auto_now=True) + status = fields.CharField(64, null=True) + +TrustUpdate_Out = pydantic_model_creator(TrustUpdates, name="TrustUpdate_Out") 
+TrustUpdate_In = pydantic_model_creator( + TrustUpdates, name="TrustUpdate_In", exclude_readonly=True) + + +##### May make this a Tortoise Model, to support tracking who/what generated each command +class AutoPkgCMD(BaseModel): + ignore_parent_trust: bool = False + match_pkg: str | None = None + # overrides: str | None = None # Not yet supported + pkg_only: bool = False + promote: bool = False + verbose: str = "vvv" + + +class AutoPkgTaskResults(BaseModel): + event: str + event_id: str = "" + recipe_id: str + success: str + stdout: str + stderr: str diff --git a/pkgbot/settings/__init__.py b/pkgbot/settings/__init__.py new file mode 100644 index 0000000..6ed0a52 --- /dev/null +++ b/pkgbot/settings/__init__.py @@ -0,0 +1,3 @@ +from . import api +from . import celery +from . import db diff --git a/api/settings.py b/pkgbot/settings/api.py similarity index 69% rename from api/settings.py rename to pkgbot/settings/api.py index d136146..3980044 100644 --- a/api/settings.py +++ b/pkgbot/settings/api.py @@ -1,6 +1,4 @@ -#!/usr/local/autopkg/python - -from fastapi import status +from fastapi import status from tortoise.contrib.fastapi import HTTPNotFoundError @@ -31,27 +29,13 @@ } ] -TORTOISE_CONFIG = { - "connections": { - # "default": "sqlite://:memory:" - "default": "sqlite://db/db.sqlite3" - }, - "apps": { - "app": { - "models": [ "db.models" ], - "default_connection": "default" - } - }, - "use_tz": False, - "timezone": "America/Phoenix" -} custom_responses = { # 404: {"description": "Item not found"}, # 302: {"description": "The item was moved"}, - status.HTTP_401_UNAUTHORIZED: { "description": + status.HTTP_401_UNAUTHORIZED: { "description": "You must authenticate before utilizing this endpoint." }, - status.HTTP_403_FORBIDDEN: { "description": + status.HTTP_403_FORBIDDEN: { "description": "You are not authorized to utilize this endpoint." 
}, status.HTTP_404_NOT_FOUND: { "model": HTTPNotFoundError }, status.HTTP_409_CONFLICT: { "description": "The object already exists." } diff --git a/pkgbot/settings/celery.py b/pkgbot/settings/celery.py new file mode 100644 index 0000000..6915239 --- /dev/null +++ b/pkgbot/settings/celery.py @@ -0,0 +1,48 @@ +import os + +from functools import lru_cache +from kombu import Queue + +from pkgbot import config + + +config = config.load_config() + + +def route_task(name, args, kwargs, options, task=None, **kw): + + if ":" in name: + queue, _ = name.split(":") + return {"queue": queue} + + return {"queue": "celery"} + + +class CeleryConfig: + broker_url: str = os.environ.get("broker_url", config.Celery.get("broker_url")) + # result_backend: str = os.environ.get("result_backend", "rpc://") + result_backend: str = os.environ.get("result_backend", f"db+sqlite:///{config.Database.get('location')}") + + task_queues: list = ( + Queue("pkgbot", queue_arguments={"x-max-priority": 10}), + Queue("autopkg", queue_arguments={"x-max-priority": 10}), + Queue("git", queue_arguments={"x-max-priority": 10}), + ) + + task_routes = (route_task,) + task_default_priority = 5 + task_queue_max_priority = 10 + task_acks_late = True + worker_prefetch_multiplier = 1 + + +@lru_cache() +def get_settings(): + + config_cls_dict = { "pkgbot": CeleryConfig, } + config_name = os.environ.get("CELERY_CONFIG", "pkgbot") + config_cls = config_cls_dict[config_name] + return config_cls() + + +settings = get_settings() diff --git a/pkgbot/settings/db.py b/pkgbot/settings/db.py new file mode 100644 index 0000000..29d61b9 --- /dev/null +++ b/pkgbot/settings/db.py @@ -0,0 +1,19 @@ +from pkgbot import config + + +config = config.load_config() + +TORTOISE_CONFIG = { + "connections": { + # "default": "sqlite://:memory:" + "default": f"sqlite:{config.Database.get('location')}" + }, + "apps": { + "app": { + "models": [ "pkgbot.db.models" ], + "default_connection": "default" + } + }, + "use_tz": False, + "timezone": 
@celery.task(name="pkgbot:send_webhook")
+ """ Sends webhook after a task is complete. """ + + pkgbot_server, headers = task_utils.api_url_helper() + data = { "task_id": task_id } + + headers["x-pkgbot-signature"] = asyncio.run(utility.compute_hex_digest( + config.PkgBot.get("webhook_secret").encode("UTF-8"), + str(data).encode("UTF-8"), + hashlib.sha512 + )) + + requests.post(f"{pkgbot_server}/autopkg/receive", + headers=headers, + data=json.dumps(data), + ) + + +@celery.task(name="git:pull_private_repo", bind=True) +def git_pull_private_repo(self): + """Perform a `git pull` for the local private repo""" + + log.info("Checking for private repo updates...") + + repo_primary_branch = config.Git.get("repo_primary_branch") + repo_push_branch = config.Git.get("repo_push_branch") + stashed = False + use_remote_push = False + + try: + + private_repo = git.Repo(os.path.expanduser(config.Git.get("local_repo_dir"))) + + if private_repo.is_dirty(): + _ = private_repo.git.stash() + stashed = True + + active_branch = private_repo.active_branch + local_branches = [ branch.name for branch in private_repo.branches ] + + _ = private_repo.remotes.origin.fetch() + # remote_branches = [ ref.name.split("/")[1] for ref in private_repo.remote().refs ] + + if active_branch != repo_primary_branch: + _ = private_repo.git.checkout(repo_primary_branch) + + if repo_push_branch in local_branches: + push_branch_commits_ahead, push_branch_commits_behind = task_utils.compare_branch_heads( + private_repo, repo_push_branch, repo_primary_branch) + + if push_branch_commits_ahead == 0 and push_branch_commits_behind > 0: + # private_repo.delete_head(repo_push_branch) + # For safety, just renaming the branch for now and after a bit of real world + # testing, switch to deleting the branch + timestamp = asyncio.run( + utility.datetime_to_string(str(datetime.now()), "%Y-%m-%d_%I-%M-%S")) + private_repo.branches[repo_push_branch].rename( + f"{repo_push_branch}_{timestamp}") + + elif push_branch_commits_ahead > 0: + use_remote_push = True + + 
primary_branch_commits_ahead, primary_branch_commits_behind = task_utils.compare_branch_heads( + private_repo, repo_primary_branch, repo_primary_branch) + + if primary_branch_commits_ahead != 0 and repo_primary_branch != repo_push_branch: + log.error("Local primary branch is ahead of remote primary branch.") + raise + + _ = private_repo.remotes.origin.pull() + + results_git_pull_command = { + "stdout": "Success", + "stderr": "", + "status": 0, + "success": True + } + + log.info("Successfully updated private repo") + + except Exception as error: + + results_git_pull_command = { + "stdout": "Error occurred during git operation", + "stderr": error, + "status": 1, + "success": False + } + + finally: + + if use_remote_push: + _ = private_repo.git.checkout(repo_push_branch) + + if stashed: + private_repo.git.stash("pop") + +##### This if statement can be removed after further real world testing... + if not results_git_pull_command["success"]: + log.error(f"stdout:\n{results_git_pull_command['stdout']}") + log.error(f"stderr:\n{results_git_pull_command['stderr']}") + + results_git_pull_command |= { + "event": "private_git_pull", + "task_id": self.request.id + } + + return results_git_pull_command + + +@celery.task(name="autopkg:repo_update", bind=True) +def autopkg_repo_update(self): + """Performs an `autopkg repo-update all`""" + + log.info("Updating parent recipe repos...") + autopkg_repo_update_command = f"{config.AutoPkg.get('binary')} repo-update all --prefs=\'{os.path.abspath(config.JamfPro_Dev.get('autopkg_prefs'))}\'" + + if task_utils.get_user_context(): + autopkg_repo_update_command = f"su - {task_utils.get_console_user()} -c \"{autopkg_repo_update_command}\"" + + results_autopkg_repo_update = utility.execute_process(autopkg_repo_update_command) + +##### This if statement can be removed after further real world testing... 
+ if not results_autopkg_repo_update["success"]: + log.error("Failed to update parent recipe repos") + log.error(f"stdout:\n{results_autopkg_repo_update['stdout']}") + log.error(f"stderr:\n{results_autopkg_repo_update['stderr']}") + + results_autopkg_repo_update |= { + "event": "autopkg_repo_update", + "task_id": self.request.id + } + + return results_autopkg_repo_update + + +@celery.task(name="autopkg:run", bind=True) +def autopkg_run(self, recipes: list, autopkg_options: models.AutoPkgCMD | dict, called_by: str): + """Creates parent and individual recipe tasks. + + Args: + recipes (list): A list of recipe dicts, which contain their configurations + autopkg_options (models.AutoPkgCMD | dict): AutoPkg CLI options + called_by (str): From what/where the task was executed + + Returns: + dict: Dict describing the results of the ran process + """ + + # log.debug(f"Calling autopkg_options: {autopkg_options}") + # log.debug(f"recipes: {recipes}") + + # Track all child tasks that are queued by this parent task + queued_tasks = [] + promote = autopkg_options.pop("promote", False) + + if not promote and not autopkg_options.get("ignore_parent_trust"): + + # Run checks if we're not promoting the recipe + task_group = group([ autopkg_repo_update.signature(), git_pull_private_repo.signature() ]) + tasks_results = (task_group.apply_async( + queue='autopkg', priority=7)).get(disable_sync_subtasks=False) + + # Check results + for task_result in tasks_results: + if not task_result["success"]: + send_webhook.apply_async((self.request.id,), queue='autopkg', priority=9) + queued_tasks.append(task_result["task_id"]) + + for recipe in recipes: + + # log.debug(f"recipe: {recipe}") + recipe_id = recipe.get("recipe_id") + +##### Not yet supported + # if autopkg_options["override_keys"]: + # for override_key in autopkg_options["override_keys"]: + # extra_options = f"{extra_options} --key '{override_key}'" +##### How will the extra_options be passed? 
log.debug(f"Recipe {recipe_id} is out of schedule")
models.AutoPkgCMD | dict, called_by: str): + """Runs the passed recipe id against `autopkg run`. + + Args: + parent_task_results (dict): Results from the calling task + recipe_id (str): Recipe ID of a recipe + autopkg_options (models.AutoPkgCMD | dict): AutoPkg CLI options + called_by (str): From what/where the task was executed + + Returns: + dict: Dict describing the results of the ran process + """ + + run_type = "recipe_run_prod" if parent_task_results.get("event") == "promote" else "recipe_run_dev" + + # Verify not a promote run and parent tasks results were success + if ( + run_type != "recipe_run_prod" + and parent_task_results + and not parent_task_results["success"] + ): + + # event_id = "" + + if f"{recipe_id}: FAILED" in parent_task_results["stderr"]: + log_msg = "Failed to verify trust info for" + event_type = "verify_trust_info" + + elif "Didn't find a recipe for" in parent_task_results["stdout"]: + log_msg = "Failed to locate" + event_type = "error" + + else: + # Generic error + log_msg = "Unknown failure occurred on" + event_type = "error" + + log.error(f"{log_msg} recipe: {recipe_id}") + send_webhook.apply_async((self.request.id,), queue='autopkg', priority=9) + + return { + "event": event_type, + # "event_id": event_id, + "called_by": called_by, + "recipe_id": recipe_id, + "success": parent_task_results["success"], + "stdout": parent_task_results["stdout"], + "stderr": parent_task_results["stderr"] + } + + else: + log.info(f"Creating `autopkg run` task for recipe: {recipe_id}") + + # Generate AutoPkg options + options = task_utils.generate_autopkg_args(**autopkg_options) + # Build the autopkg command + cmd = f"{config.AutoPkg.get('binary')} run {recipe_id} {options}" + + if task_utils.get_user_context(): + cmd = f"su - {task_utils.get_console_user()} -c \"{cmd}\"" + + # log.debug(f"Command to execute: {cmd}") + results = utility.execute_process(cmd) + + # Send task complete notification + send_webhook.apply_async((self.request.id,), 
queue='autopkg', priority=9) + + return { + "event": run_type, + "event_id": parent_task_results.get("id"), + "recipe_id": recipe_id, + "success": results["success"], + "stdout": results["stdout"], + "stderr": results["stderr"], + } + + +@celery.task(name="autopkg:verify-trust", bind=True) +def autopkg_verify_trust(self, recipe_id: str, + autopkg_options: models.AutoPkgCMD | dict, called_by: str): + """Runs the passed recipe id against `autopkg verify-trust-info`. + + Args: + recipe_id (str): Recipe ID of a recipe + autopkg_options (models.AutoPkgCMD | dict): AutoPkg CLI options + called_by (str): From what/where the task was executed + + Returns: + dict: Dict describing the results of the ran process + """ + + log.info(f"Verifying trust info for: {recipe_id}") + + # Not overriding verbose when verifying trust info + _ = autopkg_options.pop('verbose') + + autopkg_options |= { + "prefs": os.path.abspath(config.JamfPro_Dev.get("autopkg_prefs")), + "verbose": "vvv" + } + + # Generate AutoPkg options + options = task_utils.generate_autopkg_args(**autopkg_options) + # Build the autopkg command + cmd = f"{config.AutoPkg.get('binary')} verify-trust-info {recipe_id} {options}" + + if task_utils.get_user_context(): + cmd = f"su - {task_utils.get_console_user()} -c \"{cmd}\"" + + # log.debug(f"Command to execute: {cmd}") + results = utility.execute_process(cmd) + + if called_by in {"api", "slack"} and not self.request.parent_id: + send_webhook.apply_async((self.request.id,), queue='autopkg', priority=9) + + return { + "event": "verify_trust_info", + "called_by": called_by, + "recipe_id": recipe_id, + "success": results["success"], + "stdout": results["stdout"], + "stderr": results["stderr"], + } + + return results + + +@celery.task(name="autopkg:update-trust", bind=True) +def autopkg_update_trust(self, recipe_id: str, + autopkg_options: models.AutoPkgCMD | dict, trust_id: int = None): + """Runs the passed recipe id against `autopkg update-trust-info`. 
+ + Args: + recipe_id (str): Recipe ID of a recipe + autopkg_options (models.AutoPkgCMD | dict): AutoPkg CLI options + trust_id (int): The database id to associate the results to the record + + Returns: + dict: Dict describing the results of the ran process + """ + + log.info(f"Updating trust info for: {recipe_id}") + + # Generate AutoPkg options + autopkg_options = task_utils.generate_autopkg_args( + prefs=os.path.abspath(config.JamfPro_Dev.get("autopkg_prefs"))) + + repo_push_branch = config.Git.get("repo_push_branch") + stashed = False + + try: + + private_repo = git.Repo(os.path.expanduser(config.Git.get("local_repo_dir"))) + + if private_repo.is_dirty(): + _ = private_repo.git.stash() + stashed = True + + active_branch = private_repo.active_branch + local_branches = [ branch.name for branch in private_repo.branches ] + + if repo_push_branch not in local_branches: + _ = private_repo.git.branch(repo_push_branch) + + if repo_push_branch != active_branch: + _ = private_repo.git.checkout(repo_push_branch) + + cmd = f"{config.AutoPkg.get('binary')} update-trust-info {recipe_id} {autopkg_options}" + + if task_utils.get_user_context(): + cmd = f"su - {task_utils.get_console_user()} -c \"{cmd}\"" + + # log.debug(f"Command to execute: {cmd}") + results = utility.execute_process(cmd) + + if results["success"] and private_repo.git.diff(): + + log.info(f"Successfully updated trust for: {recipe_id}") + recipe_file_path = results["stdout"].split("Wrote updated ")[-1] + # log.debug(f"Updated recipe filename: {recipe_file_path}") + # Stage recipe_file_path + _ = private_repo.index.add([recipe_file_path]) + + _ = private_repo.git.commit( + "--message", "Updated Trust Info", "--message", + f"By: {config.Slack.get('bot_name')}" + ) + + _ = private_repo.git.push("--set-upstream", "origin", repo_push_branch) + log.info("Successfully updated private repo") + + except Exception as error: + log.error(f"Failed to updated private repo due to:\n{error}") + results = { + "success": False, 
+ "stdout": error, + "stderr": error + } + + if stashed: + private_repo.git.stash("pop") + + send_webhook.apply_async((self.request.id,), queue='autopkg', priority=9) + + return { + "event": "update_trust_info", + "event_id": trust_id, + "recipe_id": recipe_id, + "success": results["success"], + "stdout": results["stdout"], + "stderr": results["stderr"], + } diff --git a/pkgbot/tasks/task_utils.py b/pkgbot/tasks/task_utils.py new file mode 100644 index 0000000..5ec57b0 --- /dev/null +++ b/pkgbot/tasks/task_utils.py @@ -0,0 +1,102 @@ +import os +import re + +from datetime import datetime, timedelta + +from celery.result import AsyncResult + +from pkgbot import config +from pkgbot.utilities import common as utility + + +config = config.load_config() + + +def get_task_results(task_id): + """ Return task info for the given task_id """ + + return AsyncResult(task_id) + + +def get_user_context(): + + return os.getlogin() == "root" and os.getenv("USER") is None + + +def get_console_user(): + + # Get the Console User + results_console_user = utility.execute_process( + "/usr/sbin/scutil", "show State:/Users/ConsoleUser") + return re.sub( + "(Name : )|(\n)", "", ( re.search("Name : .*\n", results_console_user["stdout"])[0] )) + + +def check_recipe_schedule(interval, last_ran): + """Check if a recipe should be ran, based on the configured schedule. 
+ + Args: + interval (int): The "schedule" in number of days to not for + last_ran (str): datetime object in str format when recipe was last ran + + Returns: + boolean: + True: Recipe should be ran + False: Recipe should not be ran + """ + + if interval != 0 and last_ran != None: + current_time = utility.utc_to_local(datetime.now()) + last_ran_time = datetime.fromisoformat(last_ran) + interval_in_hours = interval * 24 + return current_time - last_ran_time > timedelta(hours=interval_in_hours) + + return True + + +def api_url_helper(): + secure = "s" if config.PkgBot.get("enable_ssl") else "" + pkgbot_server = f"http{secure}://{config.PkgBot.get('host')}:{config.PkgBot.get('port')}" + headers = { "Content-Type": "application/json" } + return pkgbot_server, headers + + +def generate_autopkg_args(**kwargs): + + options = "" + + # AutoPkg args + if kwargs.get("verbose"): + options = f"{options} -{kwargs.get('verbose')}" + + if kwargs.get("ignore_parent_trust"): + options = f"{options} --ignore-parent-trust-verification-errors" + + if kwargs.get("prefs"): + options = f"{options} --prefs=\'{kwargs.get('prefs')}\'" + else: + options = f"{options} --prefs=\'{os.path.abspath(config.JamfPro_Dev.get('autopkg_prefs'))}\'" + + # PkgBot args + if kwargs.get("promote_recipe_id"): + options = f"{options} --key \'RECIPE_ID={kwargs.get('promote_recipe_id')}\'" + + if kwargs.get("match_pkg"): + options = f"{options} --key \'MATCH_PKG={kwargs.get('match_pkg')}\'" + + if kwargs.get("pkg_only"): + options = f"{options} --key \'PKG_ONLY=True\'" + + return options.lstrip() + + +def compare_branch_heads(repo, local_branch, remote_branch): + + return [ + int(x) for x in ( + repo.git.rev_list( + "--left-right", + "--count", + f"{local_branch}...{remote_branch}@{{u}}" + )).split('\t') + ] diff --git a/templates/base.html b/pkgbot/templates/base.html similarity index 100% rename from templates/base.html rename to pkgbot/templates/base.html diff --git a/pkgbot/templates/index.html 
b/pkgbot/templates/index.html new file mode 100644 index 0000000..fff309d --- /dev/null +++ b/pkgbot/templates/index.html @@ -0,0 +1,26 @@ +{% extends 'base.html' %} + +{% block title %}Login{% endblock %} + +{% block content %} + + {% if session["protected_page"] %} + +
+ × + Access Denied: You must log in before accessing this page! 
+ + {% endif %} + +

PkgBot: Systems Online

+ +

PkgBot will record software version history for Jamf Pro.

+ +

PkgBot is an automation framework for the open source project AutoPkg that provides a web-based front end and a Slack Bot to send notifications and receive commands. It helps manage the lifecycle of software packaging through package and version validation and then provides an interactive method to "promote" a specific package version from "development" (or "test") to production environments.

+ +

PkgBot provides this workflow utilizing Jamf Pro and the JamfUploader line of Processors. A Slack Bot is used to send new build notifications and allows a PkgBot Admin to interact with those notifications.

+ +

This site provides visibility into third party software version history including each package's current status, plus details on the available AutoPkg recipes.

+ +{% endblock %} \ No newline at end of file diff --git a/templates/login.html b/pkgbot/templates/login.html similarity index 100% rename from templates/login.html rename to pkgbot/templates/login.html diff --git a/templates/package.html b/pkgbot/templates/package.html similarity index 100% rename from templates/package.html rename to pkgbot/templates/package.html diff --git a/templates/packages.html b/pkgbot/templates/packages.html similarity index 81% rename from templates/packages.html rename to pkgbot/templates/packages.html index 5e24c96..ff306b5 100644 --- a/templates/packages.html +++ b/pkgbot/templates/packages.html @@ -55,18 +55,21 @@ {% set sort_on = request.sort_on | default("id") %} {% for app in packages | sort(attribute=sort_on) %} + {% if app["icon"] %} + + {% elif app["jps_url"] and app["icon_id"] %} - {{ app.id }} - + {% else %} + + {% endif %} + {{ app.id }} {{ app.name }} {{ app.version }} {{ app.status }} {{ app.status_updated_by }} {{ app.packaged_date | strftime }} {{ app.promoted_date | strftime }} - - {{ app.do_not_delete }} - + {{ app.common }} {{ app.special_flags }} {{ app.notes }} diff --git a/templates/recipe.html b/pkgbot/templates/recipe.html similarity index 100% rename from templates/recipe.html rename to pkgbot/templates/recipe.html diff --git a/templates/recipes.html b/pkgbot/templates/recipes.html similarity index 92% rename from templates/recipes.html rename to pkgbot/templates/recipes.html index 0d10d5a..2b5bd3b 100644 --- a/templates/recipes.html +++ b/pkgbot/templates/recipes.html @@ -55,10 +55,11 @@ {% set sort_on = request.sort_on | default("id") %} {% for recipe in recipes | sort(attribute=sort_on) %} - {{ recipe.id }} + {{ recipe.id }} {{ recipe.recipe_id }} {{ recipe.name }} {{ recipe.enabled }} + {{ recipe.manual_only }} {{ recipe.pkg_only }} {{ recipe.last_ran | strftime }} {{ recipe.schedule }} diff --git a/pkgbot/utilities/__init__.py b/pkgbot/utilities/__init__.py new file mode 100644 index 0000000..e69de29 diff 
--git a/pkgbot/utilities/celery.py b/pkgbot/utilities/celery.py new file mode 100644 index 0000000..b7bdbef --- /dev/null +++ b/pkgbot/utilities/celery.py @@ -0,0 +1,21 @@ +from celery import current_app as current_celery_app + +from pkgbot import settings + + +def create_celery(): + celery_app = current_celery_app + celery_app.config_from_object(settings.celery.settings, namespace='CELERY') + celery_app.conf.update(task_track_started=True) + celery_app.conf.update(task_serializer='pickle') + celery_app.conf.update(result_serializer='pickle') + celery_app.conf.update(accept_content=['pickle', 'json']) + celery_app.conf.update(result_expires=200) + celery_app.conf.update(result_persistent=True) + celery_app.conf.update(worker_send_task_events=False) + celery_app.conf.update(worker_prefetch_multiplier=1) + + return celery_app + + +##### This file currently isn't used diff --git a/pkgbot/utilities/common.py b/pkgbot/utilities/common.py new file mode 100755 index 0000000..bf3cb04 --- /dev/null +++ b/pkgbot/utilities/common.py @@ -0,0 +1,300 @@ +import asyncio +import hashlib +import hmac +import logging.config +import os +# import pickle +import plistlib +import re +import shlex +import subprocess +import yaml + +from datetime import datetime, timezone +from distutils.util import strtobool + +# from sqlalchemy import create_engine +# from sqlalchemy.orm import sessionmaker + +from pkgbot import config + + +config = config.load_config() + + +def log_setup(name="PkgBot"): + + logger = logging.getLogger(name) + + if not logger.hasHandlers(): + logger.debug("LOGGER HAS NO HANDLERS!") + + # Get the log configuration + log_config = yaml.safe_load(f"{config.PkgBot.get('log_config')}") + + # Load log configuration + logging.config.dictConfig(log_config) + + else: + logger.debug("Logger has handlers!") + + # Create logger + return logger + + +log = log_setup() + + +async def run_process_async(command, input=None): + """ + A helper function for asyncio's subprocess. 
+ + Args: + command (str): The command line level syntax that would be + written in shell or a terminal window. + Returns: + Results in a dictionary. + """ + + # Validate that command is not a string + if not isinstance(command, str): + raise TypeError('Command must be a str type') + + # Format the command + # command = shlex.quote(command) + + # Run the command + process = await asyncio.create_subprocess_shell( + command, stdin=asyncio.subprocess.PIPE, stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE) + + if input: + (stdout, stderr) = await process.communicate(input=bytes(input, "utf-8")) + else: + (stdout, stderr) = await process.communicate() + + return { + "stdout": (stdout.decode()).strip(), + "stderr": (stderr.decode()).strip() if stderr != None else None, + "status": process.returncode, + "success": True if process.returncode == 0 else False + } + + +def execute_process(command, input=None): + """ + A helper function for subprocess. + + Args: + command (str): The command line level syntax that would be + written in shell or a terminal window. + Returns: + Results in a dictionary. + """ + + # Validate that command is not a string + if not isinstance(command, str): + raise TypeError('Command must be a str type') + + # Format the command + # command = shlex.quote(command) + + # Run the command + process = subprocess.Popen( + command, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + + if input: + (stdout, stderr) = process.communicate(input=bytes(input, "utf-8")) + else: + (stdout, stderr) = process.communicate() + + return { + "stdout": (stdout.decode()).strip(), + "stderr": (stderr.decode()).strip() if stderr != None else None, + "status": process.returncode, + "success": True if process.returncode == 0 else False + } + + +async def ask_yes_or_no(question): + """Ask a yes/no question via input() and determine the value of the answer. 
+ + Args: + question: A string that is written to stdout + + Returns: + True of false based on the users' answer. + + """ + + print(f"{question} [Yes/No] ", end="") + + while True: + try: + return strtobool(input().lower()) + except ValueError: + print("Please respond with [yes|y] or [no|n]: ", end="") + + +async def plist_reader(plistFile): + """A helper function to get the contents of a Property List. + Args: + plistFile: A .plist file to read in. + Returns: + stdout: Returns the contents of the plist file. + """ + + if os.path.exists(plistFile): + with open(plistFile, "rb") as plist: + plist_contents = plistlib.load(plist) + return plist_contents + + +async def utc_to_local(utc_dt): + + return utc_dt.replace(tzinfo=timezone.utc).astimezone(tz=None) + + +async def string_to_datetime(datetime_string: str, format_string: str = "%Y-%m-%d %H:%M:%S.%f"): + + return datetime.strptime(datetime_string, format_string) + + +async def datetime_to_string(datetime_string: str, format_string: str = "%Y-%m-%d %I:%M:%S"): + + converted = datetime.fromisoformat(datetime_string) + return converted.strftime(format_string) + + +async def compute_hex_digest(key: bytes, + message: bytes, hash: hashlib._hashlib.HASH = hashlib.sha256): + + return hmac.new(key, message, hash).hexdigest() + + +async def load_yaml(config_file): + + # Load the recipe config + with open(config_file, 'rb') as config_file_path: + return yaml.safe_load(config_file_path) + + +async def save_yaml(contents, config_file): + """Writes the passed dict to the passed file. + + Args: + contents (dict): a updated dict object of recipes + config_file (str): path to the configuration file to update + """ + + with open(config_file, 'w', encoding="utf8") as config_file_path: + yaml.dump(contents, config_file_path) + + +async def replace_sensitive_strings(message, sensitive_strings=None, sensitive_regex_strings=None): + """Redact sensitive strings, such as passwords, serial numbers, license keys, etc. 
before + exporting to a non-secure location. + + Args: + message (str, Any): A message that could contain sensitive strings. If `message` is not a + string, it will be "converted" to a string via `str(message)`. + sensitive_strings (str, optional): A string of sensitive strings, separated by a `|` (pipe). + These strings will be Regex escaped. Defaults to None. + sensitive_regex_strings (str, optional): A string of sensitive strings in Regex format, + separated by a `|` (pipe). Defaults to None. + """ + + + async def parse_for_sensitive_keys(a_dict, sensitive_key_names): + + found_sensitive_strings = "" + + for key, value in a_dict.items(): + + if re.search(rf".*({sensitive_key_names}).*", key, re.IGNORECASE) and value: + + if found_sensitive_strings: + found_sensitive_strings = "|".join([found_sensitive_strings, re.escape(value)]) + else: + found_sensitive_strings = re.escape(value) + + return found_sensitive_strings + + + all_sensitive_strings = r"bearer\s[\w+.-]+|" + sensitive_key_names = r"password|secret|license|serial|key" + + if config.Common.get("additional_sensitive_key_names"): + sensitive_key_names += f"|{config.Common.get('additional_sensitive_key_names')}" + + for plist in [ + config.JamfPro_Prod.get("autopkg_prefs"), + config.JamfPro_Dev.get("autopkg_prefs") + ]: + plist_contents = await plist_reader(plist) + all_sensitive_strings += await parse_for_sensitive_keys(plist_contents, sensitive_key_names) + + for string in [ + config.Common.get("redaction_strings"), + sensitive_regex_strings + ]: + if string: + all_sensitive_strings = "|".join([all_sensitive_strings, string]) + + for string in [ + config.JamfPro_Dev.get("api_user"), + config.JamfPro_Dev.get("api_password"), + config.JamfPro_Dev.get("dp1_user"), + config.JamfPro_Dev.get("dp1_password"), + config.JamfPro_Prod.get("api_user"), + config.JamfPro_Prod.get("api_password"), + config.JamfPro_Prod.get("dp1_user"), + config.JamfPro_Prod.get("dp1_password"), + sensitive_strings + ]: + if string: + 
all_sensitive_strings = "|".join([all_sensitive_strings, re.escape(string)]) + + if isinstance(message, str): + return re.sub(rf"{all_sensitive_strings}", '', message) + + elif isinstance(message, dict): + + for key, value in message.items(): + if not isinstance(value, (bool, int)) and value is not None: + message[key] = re.sub(rf"{all_sensitive_strings}", '', value) + + return message + + else: + log.warning( + f"Unaccounted for type in sensitive string substitution! Type is: {type(message)}") + return re.sub(rf"{all_sensitive_strings}", '', str(message)) + + +# async def get_task_results(task_id: str): + +# # https://docs.sqlalchemy.org/en/14/core/engines.html#sqlite +# db_engine = create_engine(f"sqlite:///{config.Database.get('location')}") +# Session = sessionmaker(db_engine) + +# with Session() as session: +# result = session.execute(f"SELECT result from celery_taskmeta where task_id = '{task_id}';").fetchone() + +# return pickle.loads(result.result) + + +async def find_receipt_plist(content: str): + + run_receipt = re.search(r'Receipt written to (.*)', content)[1] + return await plist_reader(run_receipt) + + +async def parse_recipe_receipt(content: dict, key: str): + + for step in reversed(content): + if step.get(key): + return step.get(key) + elif re.search(key, step.get("Processor"), re.IGNORECASE): + return step diff --git a/requirements.txt b/requirements.txt old mode 100644 new mode 100755 index db80a1e..358f313 --- a/requirements.txt +++ b/requirements.txt @@ -1,37 +1,56 @@ -aiohttp==3.7.4.post0 -aiosqlite==0.16.1 -anyio==3.2.1 -async-timeout==3.0.1 -attrs==20.3.0 -certifi==2022.5.18.1 +aiohttp==3.8.1 +aiosignal==1.2.0 +aiosqlite==0.17.0 +amqp==5.1.1 +anyio==3.6.1 +async-timeout==4.0.2 +billiard==3.6.4.0 +celery==5.2.7 chardet==4.0.0 -click==7.1.2 -fastapi==0.63.0 -fastapi-login==1.6.0 +charset-normalizer==2.0.12 +click-didyoumean==0.3.0 +click-plugins==1.1.1 +click-repl==0.2.0 +fastapi==0.78.0 +fastapi-login==1.8.2 +fastapi-utils==0.2.1 +flower==1.0.0 
+frozenlist==1.3.0 +gitdb==4.0.9 +GitPython==3.1.27 h11==0.12.0 -httpcore==0.13.6 -httpx==0.18.2 -idna==2.10 -iso8601==0.1.14 -Jinja2==2.11.3 -MarkupSafe==1.1.1 -multidict==5.1.0 +httpcore==0.15.0 +httpx==0.23.0 +humanize==4.2.1 +idna==3.3 +iso8601==1.0.2 +Jinja2==3.1.2 +kombu==5.2.4 +MarkupSafe==2.1.1 +multidict==6.0.2 passlib==1.7.4 -pydantic==1.8.1 -PyJWT==2.1.0 +prometheus-client==0.14.1 +prompt-toolkit==3.0.29 +pydantic==1.9.1 +PyJWT==2.4.0 pypika-tortoise==0.1.5 python-multipart==0.0.5 -pytz==2020.5 -PyYAML==5.4.1 -requests==2.25.1 +pytz==2022.1 +PyYAML==6.0 +requests==2.28.0 rfc3986==1.5.0 secure==0.3.0 six==1.16.0 -slack-sdk==3.4.2 +slack-sdk==3.17.2 +smmap==5.0.0 sniffio==1.2.0 -starlette==0.13.6 -tortoise-orm==0.17.3 -typing-extensions==3.7.4.3 -urllib3==1.26.4 -uvicorn==0.13.4 -yarl==1.6.3 +SQLAlchemy==1.4.39 +starlette==0.19.1 +tornado==6.1 +tortoise-orm==0.19.1 +typing-extensions==4.2.0 +urllib3==1.26.9 +uvicorn==0.18.1 +vine==5.0.0 +wcwidth==0.2.5 +yarl==1.7.2 diff --git a/templates/index.html b/templates/index.html deleted file mode 100644 index 2619e4e..0000000 --- a/templates/index.html +++ /dev/null @@ -1,26 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}Login{% endblock %} - -{% block content %} - - {% if session["protected_page"] %} - -
- × - Access Denied: You must login before accessing this page! -
- - {% endif %} - -

PkgBot: Systems Online

- -

PkgBot will record software version history for Jamf Pro.

- -

PkgBot is a framework to manage software packaging, testing, and promoting from a development to production environment. It utilizes the open source project AutoPkg to download and package software. From there, the JSSImporter Processor is utilized to upload packages into Jamf Pro. Several customizations have been made to provide the functionality that PkgBot offers.

- -

Within this site, you will find software version history including it's current status, as well as AutoPkg recipe configurations and status.

- -

This site will provide Site Admins greater visibility into third party software updates and more in the future. We hope this project promotes more collaboration with Deskside groups to validate software .pkgs before they're made available in production and customer systems.

- -{% endblock %} \ No newline at end of file diff --git a/utils.py b/utils.py deleted file mode 100644 index 1e39232..0000000 --- a/utils.py +++ /dev/null @@ -1,184 +0,0 @@ -#!/usr/local/autopkg/python - -import asyncio -import hashlib -import hmac -import logging.config -import os -import plistlib -import re -import shlex -import subprocess -import yaml - -from datetime import datetime, timezone, tzinfo -from distutils.util import strtobool - -import config - - -config.load() - - -def log_setup(name="PkgBot"): - - logger = logging.getLogger(name) - - if not logger.hasHandlers(): - logger.debug("LOGGER HAS NO HANDLERS!") - - # Get the log configuration - log_config = yaml.safe_load("{}".format(config.pkgbot_config.get("PkgBot.log_config"))) - - # Load log configuration - logging.config.dictConfig(log_config) - - else: - logger.debug("Logger has handlers!") - - # Create logger - return logger - - -log = log_setup() - - -async def run_process_async(command, input=None): - """ - A helper function for asyncio's subprocess. - - Args: - command: The command line level syntax that would be - written in shell or a terminal window. (str) - Returns: - Results in a dictionary. 
- """ - - # Validate that command is not a string - if not isinstance(command, str): - raise TypeError('Command must be a str type') - - # Format the command - # command = shlex.quote(command) - - # Run the command - process = await asyncio.create_subprocess_shell( - command, stdin=asyncio.subprocess.PIPE, stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE) - - if input: - (stdout, stderr) = await process.communicate(input=bytes(input, "utf-8")) - - else: - (stdout, stderr) = await process.communicate() - - return { - "stdout": (stdout.decode()).strip(), - "stderr": (stderr.decode()).strip() if stderr != None else None, - "status": process.returncode, - "success": True if process.returncode == 0 else False - } - - -async def ask_yes_or_no(question): - """Ask a yes/no question via input() and determine the value of the answer. - - Args: - question: A string that is written to stdout - - Returns: - True of false based on the users' answer. - - """ - - print("{} [Yes/No] ".format(question), end="") - - while True: - - try: - return strtobool(input().lower()) - - except ValueError: - print("Please respond with [yes|y] or [no|n]: ", end="") - - -async def plist_reader(plistFile): - """A helper function to get the contents of a Property List. - Args: - plistFile: A .plist file to read in. - Returns: - stdout: Returns the contents of the plist file. 
- """ - - if os.path.exists(plistFile): - - with open(plistFile, "rb") as plist: - - plist_contents = plistlib.load(plist) - - return plist_Contents - - -async def utc_to_local(utc_dt): - - return utc_dt.replace(tzinfo=timezone.utc).astimezone(tz=None) - - -async def string_to_datetime(datetime_string: str, format_string: str = None): - - if not format_string: - format_string = "%Y-%m-%d %H:%M:%S.%f" - - return datetime.strptime(datetime_string, format_string) - - -async def datetime_to_string(datetime_string: str, format_string: str = None): - - if not format_string: - format_string = "%Y-%m-%d %I:%M:%S" - - converted = datetime.fromisoformat(str(datetime_string)) - - return converted.strftime(format_string) - - -async def compute_hex_digest(key: bytes, message: bytes): - - return hmac.new( key, message, hashlib.sha256 ).hexdigest() - - -async def load_yaml(config_file): - - # Load the recipe config - with open(config_file, 'rb') as config_file_path: - return yaml.safe_load(config_file_path) - - -async def save_yaml(contents, config_file): - """Writes the passed dict to the passed file. - - Args: - contents (dict): a updated dict object of recipes - config_file (str): path to the configuration file to update - """ - - with open(config_file, 'w', encoding="utf8") as config_file_path: - yaml.dump(contents, config_file_path) - - -async def replace_sensitive_strings(message, sensitive_strings=None): - - default_sensitive_strings = r'{}|{}|{}|{}|{}|{}'.format( - config.pkgbot_config.get("JamfPro_Prod.api_password"), - config.pkgbot_config.get("JamfPro_Prod.api_user"), - config.pkgbot_config.get("JamfPro_Prod.dp1_user"), - config.pkgbot_config.get("JamfPro_Prod.dp1_password"), - config.pkgbot_config.get("Common.RedactionStrings"), - r"bearer\s[\w+.-]+" - ) - - if sensitive_strings: - - default_sensitive_strings = "{}|{}".format(default_sensitive_strings, sensitive_strings) - - return re.sub(default_sensitive_strings, '', message)