# out-of-tree/.github/workflows/debian-cache.yml
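# Rebuilds the out-of-tree Debian kernel metadata cache and mirrors the
# kernel .deb packages to a public DigitalOcean Spaces bucket. Runs nightly,
# on manual dispatch, and on changes to the Debian distro code.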

name: Debian Cache
on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'
  push:
    paths:
      - '.github/workflows/debian-cache.yml'
      - 'distro/debian/snapshot/**'
      - 'distro/debian/cache.go'
      - 'distro/debian/kernel.go'

jobs:
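  # Regenerate the Debian kernel metadata cache (~/.out-of-tree/debian.cache)
  # and keep a copy of the cache and logs as workflow artifacts.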
  debian-kernel-metadata-cache:
    name: Metadata
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v1

      - name: Build
        run: go build

      - name: Cache
        run: ./out-of-tree --log-level=trace distro debian cache --refetch=0

      - name: Install s3cmd
        run: sudo apt install s3cmd

      - name: Archive cache
        uses: actions/upload-artifact@v3
        with:
          name: debian-cache
          path: ~/.out-of-tree/debian.cache

      - name: Archive logs
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: debian-cache-logs
          path: ~/.out-of-tree/logs
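      # Publish the refreshed metadata cache to the public Spaces bucket.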
      - name: Upload cache
        run: s3cmd put --acl-public ~/.out-of-tree/debian.cache s3://out-of-tree/1.0.0/ --host=fra1.digitaloceanspaces.com --host-bucket='%(bucket)s.fra1.digitaloceanspaces.com' --access_key=${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} --secret_key=${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
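  # Mirror the kernel .deb packages themselves: fetch them in batches with
  # out-of-tree and upload anything new to the same Spaces bucket.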
  debian-kernel-packages-mirror:
    name: Packages
    needs: debian-kernel-metadata-cache
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v1

      - name: Build
        run: go build

      - name: Install s3cmd
        run: sudo apt install s3cmd

      - name: Mirror deb packages
        shell: python
        run: |
run: |
import os
import logging
import time
import datetime
from subprocess import getstatusoutput
def get_kernels() -> bool:
status, output = getstatusoutput(
"./out-of-tree distro debian fetch --max=16"
)
logging.info(output)
return status == 0
def upload(f: str) -> bool:
status, output = getstatusoutput(
"s3cmd "
"--host=fra1.digitaloceanspaces.com "
"--host-bucket='%(bucket)s.fra1.digitaloceanspaces.com' "
"--access_key=${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} "
"--secret_key=${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} "
f"put --acl-public {f} "
"s3://out-of-tree/1.0.0/packages/debian/"
)
logging.info(output)
return status == 0
logging.basicConfig(level=logging.NOTSET)
uploaded = []
timeout = time.time() + datetime.timedelta(hours=2).seconds
while get_kernels() and time.time() < timeout:
for f in os.listdir():
if not f.endswith('.deb'):
continue
if f in uploaded:
continue
logging.info(f)
ok = upload(f)
if ok:
uploaded += [f]
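      # Upload out-of-tree logs even if the mirror step fails.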
      - name: Archive logs
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: debian-cache-logs
          path: ~/.out-of-tree/logs