Commit 4fc0a460 authored by Philipp Goymann

Merge branch 'unstabel' into 'master'

Unstabel

See merge request !12
parents 4e3c9355 f63ce434
Pipeline #393379 passed
@@ -7,6 +7,10 @@ stages: # List of stages for jobs, and their order of execution
build-prod: # This job runs in the build stage, which runs first.
stage: build_prod
image: docker:latest
tags:
- internal
only:
- master
services:
- name: docker:dind
alias: docker
@@ -16,5 +20,24 @@ build-prod: # This job runs in the build stage, which runs first.
- docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
script:
- docker build --pull -t "$CI_REGISTRY_IMAGE:latest" .
- docker build --pull -t "$CI_REGISTRY_IMAGE:$(git describe --tags --abbrev=0)" .
- docker push "$CI_REGISTRY_IMAGE:$(git describe --tags --abbrev=0)"
- docker push "$CI_REGISTRY_IMAGE:latest"
build-dev: # This job runs in the build stage, which runs first.
stage: build_prod
tags:
- internal
only:
- unstabel
image: docker:latest
services:
- name: docker:dind
alias: docker
before_script:
- apk update && apk add git
- apt-get update -qq && apt-get install -y -qq git
- docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
script:
- docker build --pull -t "$CI_REGISTRY_IMAGE:dev" .
- docker push "$CI_REGISTRY_IMAGE:dev"
@@ -40,21 +40,18 @@ def run_s3_functions():
bucketname = args.bucketname
if args.files:
files = args.files
# after s3 server update: the s3 server no longer accepts capital letters in bucket names (crash) -> force lower case
if bucketname:
bucketname = bucketname.lower()
if bucketname != s3.confirm_bucket_name(bucketname):
s3.__exception_log__('Bucket name '+ bucketname + ' invalid. Possible would be ' + s3.confirm_bucket_name(bucketname))
#------------------------------------------------actions--------------------------------------------#
#check if bucket exists
#do NOT change this output in any way as this is being parsed by other tools (ckuenne: deploy_igv.sh)
if args.bucket_exists:
utils.check_argparser(args, ['secret', "key", 'bucketname'])
if s3.check_s3_bucket_ex(args.bucketname):
print('Found bucket ' + args.bucketname + ': yes')
if s3.check_s3_bucket_ex(bucketname):
print('Found bucket ' + bucketname + ': yes')
else:
print('Found bucket ' + args.bucketname + ': no')
print('Found bucket ' + bucketname + ': no')
if args.upload:
if args.read_form_dict:
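Bucket names are forced to lower case before use here because the s3 server no longer accepts capital letters, and the script aborts when `s3.confirm_bucket_name` would return a different name; the exact rules of `confirm_bucket_name` are not part of this diff. A rough sketch of that kind of normalisation check, assuming a simple lowercase-plus-allowed-characters rule rather than the library's actual logic (`confirm_bucket_name_sketch` is a hypothetical stand-in):

```python
import re

def confirm_bucket_name_sketch(name: str) -> str:
    """Hypothetical stand-in for s3.confirm_bucket_name: lower-case the name
    and replace characters S3 bucket names do not allow."""
    candidate = re.sub(r"[^a-z0-9.-]", "-", name.lower())
    return candidate.strip("-.")

bucket_name = "My_Test.Bucket"
suggestion = confirm_bucket_name_sketch(bucket_name)
if bucket_name != suggestion:
    # the real script logs via s3.__exception_log__ and aborts instead of renaming silently
    print(f"Bucket name {bucket_name} invalid. Possible would be {suggestion}")
```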
@@ -69,6 +66,18 @@ def run_s3_functions():
utils.check_argparser(args, ['secret', "key", "files", "bucketname"])
s3.download_s3_objects( bucketname, files, destination=output, download_bar=True)
if args.create_bucket:
utils.check_argparser(args, ['secret', "key", 'bucketname'])
s3.create_s3_bucket(bucketname, name_addition=True)
if args.delete_bucket:
utils.check_argparser(args, ['secret', "key", 'bucketname'])
s3.emptie_s3_buckets(bucketname, delete_bucket = True)
#--------------------------------------------------------------------------------------------------------#
# parse command line arguments:
def argparsefunc():
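The new `--create_bucket` and `--delete_bucket` actions delegate to `s3.create_s3_bucket` and `s3.emptie_s3_buckets(..., delete_bucket=True)`, whose bodies are outside this diff. A rough sketch of the plain boto3 calls such helpers typically wrap; the resource setup and function names below are assumptions, not the Loosolab_s3 implementation:

```python
import boto3

s3 = boto3.resource("s3")  # credentials are assumed to come from the environment

def create_bucket_sketch(name: str):
    """Create a bucket; boto3 may raise ClientError if the name is taken elsewhere."""
    return s3.create_bucket(Bucket=name)

def empty_bucket_sketch(name: str, delete_bucket: bool = False):
    """Delete every object in the bucket and, optionally, the bucket itself
    (object versions are not handled in this sketch)."""
    bucket = s3.Bucket(name)
    bucket.objects.all().delete()
    if delete_bucket:
        bucket.delete()
```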
@@ -83,6 +92,8 @@ def argparsefunc():
parser.add_argument("--upload", action='store_true', help="upload files to bucket")
parser.add_argument("--download", action='store_true', help="download files to bucket")
parser.add_argument("--bucket_exists", action='store_true', help="check if bucket exists")
parser.add_argument("--create_bucket", action='store_true', help="create bucket")
parser.add_argument("--delete_bucket", action='store_true', help="create bucket")
#values
@@ -105,3 +116,4 @@ def argparsefunc():
if __name__ == "__main__":
run_s3_functions()
@@ -154,13 +154,18 @@ class Loosolab_s3:
Boolean
does bucket exist?
"""
try:
self.session.meta.client.head_bucket(Bucket=bucket_name)
buckets = [bucket.name for bucket in self.session.buckets.all()]
if bucket_name in buckets:
return True
else:
return False
except botocore.exceptions.ClientError as e:
if not e.response['Error']['Code'] == "404":
self.__exception_log__("Something went wrong checking for " + bucket_name, e)
return False
return True
#--------------------------------------------------------------------------------------------------------#
def check_s3_object_ex(self, bucket_name, file_name):
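The changed lines in `check_s3_bucket_ex` test membership in `self.session.buckets.all()` next to the existing `head_bucket` call wrapped in a try/except. Both ways of asking the question, sketched with plain boto3 under the assumption that the session is a boto3 S3 resource (the helper names are illustrative, not the class's API):

```python
import boto3
import botocore.exceptions

s3 = boto3.resource("s3")

def bucket_exists_by_listing(name: str) -> bool:
    """Existence via listing: collect the names of all owned buckets and test membership."""
    return name in [bucket.name for bucket in s3.buckets.all()]

def bucket_exists_by_head(name: str) -> bool:
    """Existence via HEAD: treat a 404 as 'does not exist', re-raise anything else."""
    try:
        s3.meta.client.head_bucket(Bucket=name)
        return True
    except botocore.exceptions.ClientError as error:
        if error.response["Error"]["Code"] != "404":
            raise
        return False
```

One practical difference: listing only covers buckets owned by the credentials in use, while `head_bucket` also answers for accessible buckets owned by another account.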
@@ -369,6 +374,7 @@ class Loosolab_s3:
bucket_list = [bucket_names]
else:
bucket_list = bucket_names
for bucket_name in bucket_list:
try:
bucket = self.session.Bucket(bucket_name)
@@ -424,7 +430,7 @@ class Loosolab_s3:
return match_list
except Exception as e:
self.logger.error('Objects in Bucket ' + bucket_name + ' could not be listed! ' + str(e))
#--------------------------------------------------------------------------------------------------------#
def upload_s3_objects(self, bucket_name, file_list, compare=True):
"""Creating an s3 bucket.
@@ -440,6 +446,7 @@ class Loosolab_s3:
--------
Boolean
"""
modBool = False
if not self.check_s3_bucket_ex(bucket_name):
self.__exception_log__("Bucket for upload does not exist!")
@@ -507,8 +514,8 @@ class Loosolab_s3:
meta_data = self.client.head_object(Bucket=s3_bucket, Key=s3_object_key)
total_length = float(meta_data.get('ContentLength', 0))
downloaded = 0
part = total_length / 10
count = total_length / 10
part = total_length / 100
count = total_length / 100
def progress(chunk):
nonlocal downloaded
nonlocal part
@@ -516,7 +523,8 @@ class Loosolab_s3:
downloaded += chunk
if downloaded > count:
print(str(int((100 / total_length ) * count)) + ' % Downloaded!')
print(str(int((100 / total_length ) * count)) + ' % Downloaded!', end="\r")
#print("\r[%s%s]" % (str(int((100 / total_length ) * count)) + ' % Downloaded!') , end="\r")
count += part
#print("\r[%s%s]" % ('=' * done, ' ' * (50-done)) , end="\r")
@@ -525,4 +533,6 @@ class Loosolab_s3:
self.logger.info(f'Downloading {s3_object_key}')
with open(local_file_name, 'wb') as f:
self.client.download_fileobj(s3_bucket, s3_object_key, f, Callback=progress)
\ No newline at end of file
self.client.download_fileobj(s3_bucket, s3_object_key, f, Callback=progress)
self.logger.info(f'Downloading finished {s3_object_key}')
\ No newline at end of file
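The progress callback for downloads now reports in 1 % steps and rewrites a single console line via `end="\r"`, and a log entry is written once `download_fileobj` returns. A self-contained sketch of that pattern; bucket, key and local file name are placeholders:

```python
import boto3

client = boto3.client("s3")

def make_progress(total_length: float):
    """Build a Callback for download_fileobj that rewrites one console line."""
    downloaded = 0

    def progress(chunk):
        nonlocal downloaded
        downloaded += chunk
        percent = int(100 * downloaded / total_length) if total_length else 100
        print(f"{percent} % Downloaded!", end="\r")

    return progress

# bucket, key and local file name below are placeholders
bucket, key, local_file = "example-bucket", "example/object.bin", "object.bin"
total = float(client.head_object(Bucket=bucket, Key=key).get("ContentLength", 0))
with open(local_file, "wb") as handle:
    client.download_fileobj(bucket, key, handle, Callback=make_progress(total))
print()  # move past the carriage-return progress line once the download is done
```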