uploader.rb (from a fork of Revolutionary-Games/Thrive)

# frozen_string_literal: true

require 'json'
require 'uri'
require 'httparty'
require 'parallel'
require 'sha3'

require_relative 'dehydrate'

# Common upload functions.
# This is kept separate from the upload script so that the release build
# script can start an upload while another build is still being created.
# See the bottom of this file for an example of how this is expected to be used.
DEFAULT_PARALLEL_UPLOADS = 5
DEVCENTER_URL = 'https://dev.revolutionarygamesstudio.com'
MAX_SERVER_BATCH_SIZE = 100
# Manages uploading dehydrated devbuilds and the object cache entries missing from the server
class DevBuildUploader
  def initialize(folder, cache, options)
    @folder = folder
    @cache = cache
    @parallel = options[:parallel_upload]
    @base_url = options[:url]
    @retries = options[:retries]

    @dehydrated_to_upload = []
    @devbuilds_to_upload = []

    @access_key = ENV.fetch('THRIVE_DEVCENTER_ACCESS_KEY', nil)

    puts 'Uploading anonymous devbuilds' unless @access_key
  end

  def headers
    if @access_key
      { 'X-Access-Code' => @access_key }
    else
      {}
    end
  end
  # Run the upload operation
  def run
    info 'Starting devbuild upload'

    Dir.glob(File.join(@folder, '*.meta.json')) do |file|
      archive_file = file.chomp '.meta.json'

      check_build_for_upload file, archive_file
    end

    @dehydrated_to_upload.uniq!

    info "Beginning upload of #{@devbuilds_to_upload.size} devbuilds with " \
         "#{@dehydrated_to_upload.size} dehydrated objects"

    perform_uploads

    success 'DevBuild upload finished.'
  end
  private

  def perform_uploads
    info 'Fetching dehydrated object upload tokens'
    things_to_upload = fetch_upload_tokens

    info 'Uploading dehydrated objects'
    Parallel.map(things_to_upload, in_threads: @parallel) do |obj|
      upload(*obj)
    end

    info 'Fetching devbuild upload tokens'
    things_to_upload = fetch_devbuild_upload_tokens

    info 'Uploading devbuilds'
    Parallel.map(things_to_upload, in_threads: @parallel) do |obj|
      upload(*obj)
    end

    success 'Done uploading'
  end
  # Asks the server for the upload URLs and verify tokens of the queued
  # dehydrated objects, in batches of MAX_SERVER_BATCH_SIZE
  def fetch_upload_tokens
    # TODO: if the internet is slow it might not be a good idea to
    # fetch all of the tokens at once
    result = []

    @dehydrated_to_upload.each_slice(MAX_SERVER_BATCH_SIZE) do |group|
      data = with_retry do
        HTTParty.post(URI.join(@base_url, '/api/v1/devbuild/upload_objects'),
                      headers: headers, body: {
                        objects: group.map do |i|
                          { sha3: i, size: object_size(i) }
                        end
                      })
      end
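      # The server response is assumed to be JSON along these lines (field
      # names inferred from the reads below; values are illustrative only):
      #   { "upload" => [{ "sha3" => "...", "upload_url" => "https://...",
      #                    "verify_token" => "..." }] }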
      data['upload'].each do |upload|
        result.append [path_from_hash(upload['sha3']), upload['upload_url'],
                       upload['verify_token']]
      end
    end

    result
  end
  # Asks the server for an upload URL and verify token for each devbuild
  # that is queued for upload
  def fetch_devbuild_upload_tokens
    result = []

    @devbuilds_to_upload.each do |build|
      data = with_retry do
        HTTParty.post(URI.join(@base_url, '/api/v1/devbuild/upload_devbuild'),
                      headers: headers, body: {
                        build_hash: build[:version],
                        build_branch: build[:branch],
                        build_platform: build[:platform],
                        build_size: File.size(build[:file]),
                        required_objects: build[:dehydrated_objects],
                        build_zip_hash: build[:build_zip_hash]
                      })
      end

      onError "failed to receive upload url, response: #{data}" unless data['upload_url']

      result.append [build[:file], data['upload_url'],
                     data['verify_token']]
    end

    result
  end
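  # The *.meta.json files read below are assumed to contain roughly the
  # following structure (field names taken from the reads in this method;
  # the values are only illustrative):
  #
  #   {
  #     "version": "abc123",
  #     "platform": "Linux/X11",
  #     "branch": "master",
  #     "dehydrated": { "objects": ["sha3hash1", "sha3hash2"] }
  #   }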
  def check_build_for_upload(file, archive_file)
    meta = JSON.parse File.read(file)

    begin
      version = meta['version']
      platform = meta['platform']
      branch = meta['branch']
      dehydrated_objects = meta['dehydrated']['objects']

      raise 'no version in file' unless version
    rescue StandardError
      onError 'Invalid devbuild meta content'
    end

    puts "Found devbuild: #{version}, #{platform}, #{branch}"

    data = with_retry do
      HTTParty.post(URI.join(@base_url, '/api/v1/devbuild/offer_devbuild'),
                    headers: headers, body: {
                      build_hash: version,
                      build_platform: platform
                    })
    end

    return unless data['upload']

    puts "Server doesn't have it."

    @devbuilds_to_upload.append({ file: archive_file, version: version, platform: platform,
                                  branch: branch, dehydrated_objects: dehydrated_objects,
                                  build_zip_hash:
                                    SHA3::Digest::SHA256.file(archive_file).hexdigest })

    # Determine related objects to upload, in batches of MAX_SERVER_BATCH_SIZE
    dehydrated_objects.each_slice(MAX_SERVER_BATCH_SIZE) do |group|
      data = with_retry do
        HTTParty.post(URI.join(@base_url, '/api/v1/devbuild/offer_objects'),
                      headers: headers, body: {
                        objects: group.map do |i|
                          { sha3: i, size: object_size(i) }
                        end
                      })
      end

      data['upload'].each do |upload|
        @dehydrated_to_upload.append upload
      end
    end
  end
  def path_from_hash(hash)
    File.join(@cache, "#{hash}.gz")
  end

  def object_size(hash)
    File.size path_from_hash(hash)
  end
  # Uploads a single file to its storage URL and then reports the
  # finished upload to the server
  def upload(file, url, token)
    puts "Uploading file #{file}"

    put_file file, url

    # Tell the server about upload success
    with_retry do
      HTTParty.post(URI.join(@base_url, '/api/v1/devbuild/finish'),
                    headers: headers, body: {
                      token: token
                    })
    end
  end
  # Puts file to storage URL
  def put_file(file, url)
    with_retry do
      HTTParty.put(url, headers: { 'Content-Length' => File.size(file).to_s },
                   body_stream: File.open(file, 'rb'))
    end
  end
  # Runs the given block (which should perform an HTTP request and return
  # the response) until it succeeds or the retries run out.
  # A 503 response is retried with exponentially increasing waits.
  def with_retry(needed_response_code: 200)
    time_to_wait = 20

    (1..@retries).each do |i|
      begin
        response = yield

        if response.code == 503
          # Server is busy: wait and retry instead of returning this response
          puts "Error 503: waiting #{time_to_wait} seconds..."
          sleep(time_to_wait)
          time_to_wait *= 2
          next
        elsif response.code != needed_response_code
          puts "Response: #{response}"
          raise "unexpected response code: #{response.code}"
        end

        return response
      rescue StandardError => e
        puts "HTTP request failed: #{e}, " +
             (i < @retries ? "retry attempt #{i}" : 'ran out of retries')
        sleep 1
      end
    end

    raise 'HTTP request ran out of retries'
  end
end
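
# Example usage (a minimal sketch, not part of this file's upload flow): the
# real driver is the separate upload script, and the folder / cache paths
# below are placeholders. The option keys match what #initialize reads.
#
#   uploader = DevBuildUploader.new(
#     'builds',              # folder containing the *.meta.json files and build archives
#     'builds/object_cache', # folder with the dehydrated (gzipped) objects
#     { parallel_upload: DEFAULT_PARALLEL_UPLOADS, url: DEVCENTER_URL, retries: 3 }
#   )
#   uploader.run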