Skip to content

Commit

Permalink
Add upload field into umbrella spec
Browse files Browse the repository at this point in the history
This boolean field can be used to mark whether a dependency should
be uploaded to S3/OSF or not.
  • Loading branch information
haiyanmeng committed Aug 19, 2016
1 parent a850651 commit 4166517
Show file tree
Hide file tree
Showing 2 changed files with 38 additions and 22 deletions.
7 changes: 6 additions & 1 deletion doc/umbrella.html
Original file line number Diff line number Diff line change
Expand Up @@ -582,7 +582,8 @@ <h2 id="create_spec">Create Your Specification<a class="sectionlink" href="#crea

<p><b>Common Attributes of OS, Software, Data Dependencies:</b></p>
<p>Each OS, software
and data dependency can have the following attributes: source, checksum, size, format, uncompressed_size.</p>
and data dependency can have the following attributes: source, checksum, size, format,
uncompressed_size, and upload.</p>

<p> These attributes are required in a self-contained umbrella specification,
but not required in an umbrella specification which is attached to a Metadata
Expand Down Expand Up @@ -620,6 +621,10 @@ <h2 id="create_spec">Create Your Specification<a class="sectionlink" href="#crea
<li><b>uncompressed_size</b>: the uncompressed size of the dependency in bytes, only meaningful
when the format attribute is not plain text.</li>

<li><b>upload (optional)</b>: whether this dependency should be uploaded to storage services
such as OSF and S3. This field is a boolean and can be set to either
<tt>false</tt> or <tt>true</tt>.</li>

</ul>

<p><b>Relationship of id and checksum and source.</b> The checksum of each package
Expand Down
53 changes: 32 additions & 21 deletions umbrella/src/umbrella.py
Original file line number Diff line number Diff line change
Expand Up @@ -3462,35 +3462,42 @@ def spec_upload(spec_json, meta_json, target_info, sandbox_dir, osf_auth=None, s
mountpoint = '/'
action = 'unpack'

if spec_json["os"].has_key("source") or attr_check(item, meta_search(meta_json, item, os_id), "source", 1):
if spec_json["os"].has_key("source"):
sources = spec_json["os"]["source"]
else:
sources = meta_search(meta_json, item, os_id)["source"]
if not spec_json["os"].has_key("upload") or spec_json["os"]["upload"] == True:
if spec_json["os"].has_key("source") or attr_check(item, meta_search(meta_json, item, os_id), "source", 1):
if spec_json["os"].has_key("source"):
sources = spec_json["os"]["source"]
else:
sources = meta_search(meta_json, item, os_id)["source"]

if has_source(sources, target_info[0]):
logging.debug("The os section already has a url from %s!", target_info[0])
print "The os section already has a url from %s!" % target_info[0]
if has_source(sources, target_info[0]):
logging.debug("The os section already has a url from %s!", target_info[0])
print "The os section already has a url from %s!" % target_info[0]
else:
upload_count += 1
r3 = dependency_process(item, os_id, action, meta_json, sandbox_dir, osf_auth)
logging.debug("Add mountpoint (%s:%s) into mount_dict for /.", mountpoint, r3)
mount_dict[mountpoint] = r3
if target_info[0] == "osf":
osf_url = osf_upload(target_info[1], target_info[2], target_info[3], os_image_dir + ".tar.gz")
spec_json["os"]["source"].append("osf+" + osf_url)
elif target_info[0] == "s3":
s3_url = s3_upload(s3_bucket, os_image_dir + ".tar.gz", target_info[1])
spec_json["os"]["source"].append("s3+" + s3_url)
else:
upload_count += 1
r3 = dependency_process(item, os_id, action, meta_json, sandbox_dir, osf_auth)
logging.debug("Add mountpoint (%s:%s) into mount_dict for /.", mountpoint, r3)
mount_dict[mountpoint] = r3
if target_info[0] == "osf":
osf_url = osf_upload(target_info[1], target_info[2], target_info[3], os_image_dir + ".tar.gz")
spec_json["os"]["source"].append("osf+" + osf_url)
elif target_info[0] == "s3":
s3_url = s3_upload(s3_bucket, os_image_dir + ".tar.gz", target_info[1])
spec_json["os"]["source"].append("s3+" + s3_url)
cleanup(tempfile_list, tempdir_list)
logging.critical("the os section does not has source attr!")
sys.exit("the os section does not has source attr!")
else:
cleanup(tempfile_list, tempdir_list)
logging.critical("the os section does not has source attr!")
sys.exit("the os section does not has source attr!")
logging.debug("the os section has its upload field set to false, ignore uploading it")

for sec_name in ["data"]:
if spec_json.has_key(sec_name) and spec_json[sec_name]:
sec = spec_json[sec_name]
for item in sec:
if sec[item].has_key("upload") and sec[item]["upload"] == False:
logging.debug("ignore upload %s becuase its upload field is set to false", item)
continue

if sec[item].has_key("source") or attr_check(item, meta_search(meta_json, item, id), "source", 1):
if sec[item].has_key("source"):
sources = sec[item]["source"]
Expand Down Expand Up @@ -3532,6 +3539,10 @@ def spec_upload(spec_json, meta_json, target_info, sandbox_dir, osf_auth=None, s
break

for item in sec:
if sec[item].has_key("upload") and sec[item]["upload"] == False:
logging.debug("ignore upload %s becuase its upload field is set to false", item)
continue

if sec[item].has_key("source") or attr_check(item, meta_search(meta_json, item, id), "source", 1):
if sec[item].has_key("source"):
sources = sec[item]["source"]
Expand Down

0 comments on commit 4166517

Please sign in to comment.