Port 4250 add default port resources for integration (#8) #49

name: Release integrations
on:
  push:
    branches:
      - main
  workflow_dispatch:
jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    permissions:
      packages: write
      contents: read
    steps:
      - name: Check out code
        uses: actions/checkout@v2
      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ secrets.DOCKER_MACHINE_USER }}
          password: ${{ secrets.DOCKER_MACHINE_TOKEN }}
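      # The step below builds one Docker image per integration: each integration folder is
      # expected to contain an ocean-spec.yaml with a `type` and a `version` field (the two
      # fields read below), and versions that already exist in ghcr.io are skipped.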
      - name: Build and push Docker image
        run: |
          # Base64-encode the GITHUB_TOKEN for use as a Bearer token against the ghcr.io registry API
          GHCR_TOKEN=$(echo ${{ secrets.GITHUB_TOKEN }} | base64)
          files=$(find integrations -name "ocean-spec.yaml")
          for file in $files; do
            # Get the integration type from ocean-spec.yaml
            type=$(yq eval '.type' "$file")
            folder=$(dirname "$file")
            # Get the version from ocean-spec.yaml
            version=$(yq eval '.version' "$file")
            # Check whether this version already exists in the ghcr.io registry
            MANIFEST=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: Bearer ${GHCR_TOKEN}" "https://ghcr.io/v2/port-labs/port-ocean-$type/manifests/$version")
            if [ "$MANIFEST" = "200" ]; then
              echo "Image already exists in ghcr.io: port-ocean-$type:$version"
            else
              echo "Building and pushing new image: port-ocean-$type:$version"
              docker build -t "ghcr.io/port-labs/port-ocean-$type:$version" -t "ghcr.io/port-labs/port-ocean-$type:latest" "$folder"
              docker push "ghcr.io/port-labs/port-ocean-$type" --all-tags
            fi
          done
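      # The next two steps configure AWS credentials and use them to publish an index of all
      # integration specifications to S3.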
      - name: Configure AWS Credentials 🔒
        id: aws-credentials
        uses: aws-actions/configure-aws-credentials@v2
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_REGION }}
      - name: Upload specifications to S3
        run: |
          # Directory to search for ocean-spec.yaml files
          directory="integrations"
          # Temporary file to store the concatenated YAML content
          temp_file="temp.yaml"
          # Output file name
          output_file="index.json"
          # AWS S3 bucket details
          aws_s3_bucket="ocean-registry"
          # Find all ocean-spec.yaml files under the specified directory
          find "$directory" -type f -name "ocean-spec.yaml" > file_list.txt
          # Concatenate the specs into a single multi-document YAML stream, writing the
          # document separator before each file so no empty trailing document is created
          while IFS= read -r file; do
            echo "---" >> "$temp_file"
            cat "$file" >> "$temp_file"
          done < file_list.txt
          # Convert the multi-document YAML into a single JSON array using yq v4 (the same yq used above)
          yq eval-all -o=json '[.]' "$temp_file" > "$output_file"
          aws s3 cp "$output_file" "s3://$aws_s3_bucket/$output_file"