diff --git a/.github/workflows/pr_build.yml b/.github/workflows/pr_build.yml
index 2d603ea6f4..fba23772fe 100644
--- a/.github/workflows/pr_build.yml
+++ b/.github/workflows/pr_build.yml
@@ -105,6 +105,13 @@ jobs:
       shell: bash
       working-directory: ${{runner.workspace}}/build
       run: cat ./_CPack_Packages/win64/NSIS/NSISOutput.log
+    - name: Upload Artifact
+      shell: bash
+      working-directory: ${{runner.workspace}}/build
+      env:
+        GITHUB_CONTEXT: ${{ toJson(github) }}
+        ARTIFACT_PATTERN: HighFidelity-Beta-PR${{ github.event.number }}-*.exe
+      run: python "$GITHUB_WORKSPACE\tools\ci-scripts\upload_to_publish_server.py"
 
   build_full_linux:
     runs-on: ubuntu-latest
diff --git a/tools/ci-scripts/upload_to_publish_server.py b/tools/ci-scripts/upload_to_publish_server.py
new file mode 100644
index 0000000000..8b363f36aa
--- /dev/null
+++ b/tools/ci-scripts/upload_to_publish_server.py
@@ -0,0 +1,77 @@
+import os
+import json
+from hashlib import sha256
+import http.client
+from http import HTTPStatus
+import time
+import struct
+import random
+import glob
+
+FILE_READ_BUFFER = 4096
+
+path = os.path.join(os.getcwd(), os.environ['ARTIFACT_PATTERN'])
+files = glob.glob(path, recursive=False)
+uploading_files = []
+for archive_file in files:
+    file = open(archive_file, 'rb')
+    sha256_hash = sha256()
+    file.seek(0, 0)
+    for byte_block in iter(lambda: file.read(FILE_READ_BUFFER), b""):
+        sha256_hash.update(byte_block)
+
+    checksum = sha256_hash.hexdigest()
+
+    uploading_files.append({
+        "filename": os.path.basename(archive_file),
+        "sha256_checksum": checksum,
+        "file_length": file.tell()
+    })
+    file.close()
+
+print("BuildFileHashes: " + json.dumps(uploading_files))
+
+file_contents = []
+file_sizes = []
+
+for archiveFile in files:
+    file = open(archiveFile, 'rb')
+    file_data = file.read()
+    file_sizes.append(len(file_data))
+    file_contents.append(file_data)
+    file.close()
+
+conn = http.client.HTTPSConnection("athena-build-uploader.thoys.nl")
+
+context = json.loads(os.environ['GITHUB_CONTEXT'])
+
+owner_and_repository = context["repository"].split("/")
+owner = owner_and_repository[0]
+repository = owner_and_repository[1]
+
+headers = {
+    "owner": owner,
+    "repo": repository,
+    "commit_hash": context["event"]["pull_request"]["head"]["sha"],
+    "pull_number": context["event"]["number"],
+    "job_name": os.environ["JOB_NAME"],
+    "run_id": context["run_id"],
+    "file_sizes": ','.join(str(e) for e in file_sizes)
+}
+
+concat_file_body = b''.join(file_contents)
+
+print("Total files size: " + str(len(concat_file_body)))
+
+conn.request("PUT", "/", body=concat_file_body, headers=headers)
+response = conn.getresponse()
+
+EXIT_CODE_OK = 0
+EXIT_CODE_ERROR = 1
+
+if (response.status == HTTPStatus.OK):
+    print("response: ", json.loads(response.read()))
+    exit(EXIT_CODE_OK)
+else:
+    print(response.status, response.reason, response.read())
+    exit(EXIT_CODE_ERROR)
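
The wire format used by the new script is ad hoc: every matched installer is concatenated into a single PUT body, and the pieces are described only through request headers (per-file byte lengths in `file_sizes`, plus repository, PR, and commit metadata). The server side is not part of this diff, so the following is only a minimal sketch of how a receiver could split the body back apart using that same comma-separated header and verify the announced SHA-256 digests; `split_concatenated_body` is a made-up name for illustration, not an existing function.

```python
# Hypothetical receiver-side sketch (not part of this PR): split the
# concatenated PUT body back into individual artifacts using the
# comma-separated "file_sizes" header that upload_to_publish_server.py sends.
from hashlib import sha256


def split_concatenated_body(body: bytes, file_sizes_header: str) -> list:
    """Split a concatenated upload body into per-file byte strings.

    `file_sizes_header` is the comma-separated list of byte lengths,
    e.g. "1048576,2097152", in the same order the files were concatenated.
    """
    sizes = [int(s) for s in file_sizes_header.split(",") if s]
    if sum(sizes) != len(body):
        raise ValueError("file_sizes header does not match body length")

    chunks = []
    offset = 0
    for size in sizes:
        chunks.append(body[offset:offset + size])
        offset += size
    return chunks


if __name__ == "__main__":
    # Tiny self-check: two "files" of 3 and 5 bytes, then print their hashes
    # so they can be compared against the "BuildFileHashes" log line.
    payload = b"abc" + b"defgh"
    for chunk in split_concatenated_body(payload, "3,5"):
        print(len(chunk), sha256(chunk).hexdigest())
```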
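For local debugging, the script only needs `GITHUB_CONTEXT`, `ARTIFACT_PATTERN`, and `JOB_NAME` in the environment (note that `JOB_NAME` is read by the script but is not set in the workflow hunk above, so it presumably comes from elsewhere in the job). Below is a hedged sketch of a dry run with fabricated values; every name, SHA, and number is a placeholder, and running it would attempt a real PUT to athena-build-uploader.thoys.nl.

```python
# Hypothetical local dry-run (not part of this PR): set up the environment
# variables that upload_to_publish_server.py expects so its glob/hash/header
# logic can be exercised outside GitHub Actions. All values are placeholders.
import json
import os
import subprocess

fake_context = {
    "repository": "highfidelity/hifi",
    "run_id": 123456789,                                    # placeholder
    "event": {
        "number": 1,                                        # placeholder PR number
        "pull_request": {"head": {"sha": "0" * 40}},        # placeholder commit
    },
}

env = dict(os.environ)
env["GITHUB_CONTEXT"] = json.dumps(fake_context)
env["ARTIFACT_PATTERN"] = "HighFidelity-Beta-PR1-*.exe"     # placeholder pattern
env["JOB_NAME"] = "build_full_windows"                      # placeholder job name

# Run from the directory that contains the built installer(s); note this
# still issues a real PUT to the publish server at the end of the script.
subprocess.run(
    ["python", "tools/ci-scripts/upload_to_publish_server.py"],
    env=env,
    check=False,
)
```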