arvados.commands.run

# Copyright (C) The Arvados Authors. All rights reserved.
# Copyright (C) 2018 Genome Research Ltd.
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import arvados
import arvados.commands.ws as ws
import argparse
import json
import re
import os
import stat
from . import put
import time
import subprocess
import logging
import sys
import errno
import arvados.commands._util as arv_cmd
import arvados.collection
import arvados.config as config

from arvados._version import __version__

logger = logging.getLogger('arvados.arv-run')
logger.setLevel(logging.INFO)

class ArvFile(object):
    def __init__(self, prefix, fn):
        self.prefix = prefix
        self.fn = fn

    def __hash__(self):
        return (self.prefix+self.fn).__hash__()

    def __eq__(self, other):
        return (self.prefix == other.prefix) and (self.fn == other.fn)

class UploadFile(ArvFile):
    pass

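ArvFile hashes and compares by (prefix, fn), so duplicate references to the same file collapse when gathered into a set. A small illustrative sketch (the path is hypothetical):

    a = ArvFile("-i ", "tmp/data.txt")
    b = ArvFile("-i ", "tmp/data.txt")
    assert a == b                # equal by (prefix, fn)
    assert len({a, b}) == 1      # duplicates collapse in a set
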
# Determine if a file is in a collection, and return a tuple consisting of the
# portable data hash and the path relative to the root of the collection.
# Return (None, None) if the path isn't within an arv-mount collection or
# there is an error.
def is_in_collection(root, branch):
    try:
        if root == "/":
            return (None, None)
        fn = os.path.join(root, ".arvados#collection")
        if os.path.exists(fn):
            with open(fn, 'r') as f:
                c = json.load(f)
            return (c["portable_data_hash"], branch)
        else:
            sp = os.path.split(root)
            return is_in_collection(sp[0], os.path.join(sp[1], branch))
    except (IOError, OSError):
        return (None, None)

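For example, for a path served by arv-mount, the recursion walks upward until it finds the ".arvados#collection" pseudofile at the collection's mount point. A sketch, assuming a hypothetical mount at /mnt/arv/mycoll:

    # /mnt/arv/mycoll/.arvados#collection exists, so the walk stops there and
    # the branch accumulates the intermediate directory on the way up.
    pdh, rel = is_in_collection("/mnt/arv/mycoll/subdir", "data.txt")
    # pdh -> the collection's portable data hash, rel -> "subdir/data.txt"
    # Outside any arv-mount collection the result is (None, None).
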
# Determine the project in which to place the output of this command by
# searching upward for an arv-mount pseudofile indicating the project.  If the
# cwd isn't within an arv-mount project or there is an error, return
# current_user.
def determine_project(root, current_user):
    try:
        if root == "/":
            return current_user
        fn = os.path.join(root, ".arvados#project")
        if os.path.exists(fn):
            with open(fn, 'r') as f:
                c = json.load(f)
            if 'writable_by' in c and current_user in c['writable_by']:
                return c["uuid"]
            else:
                return current_user
        else:
            sp = os.path.split(root)
            return determine_project(sp[0], current_user)
    except (IOError, OSError):
        return current_user

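A sketch of typical use, resolving the owner for new output starting from the current working directory (the API calls are standard Arvados SDK calls; the directory layout is hypothetical):

    api = arvados.api('v1')
    me = api.users().current().execute()["uuid"]
    # Returns the UUID from a writable ".arvados#project" pseudofile found
    # above the cwd, or falls back to the user's own UUID.
    owner = determine_project(os.getcwd(), me)
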
# Determine if string corresponds to a file, and if that file is part of an
# arv-mounted collection or only local to the machine.  Returns one of
# ArvFile() (file already exists in a collection), UploadFile() (file needs to
# be uploaded to a collection), or simply returns prefix+fn (which yields the
# original parameter string).
def statfile(prefix, fn, fnPattern="$(file %s/%s)", dirPattern="$(dir %s/%s/)", raiseOSError=False):
    absfn = os.path.abspath(fn)
    try:
        st = os.stat(absfn)
        sp = os.path.split(absfn)
        (pdh, branch) = is_in_collection(sp[0], sp[1])
        if pdh:
            if stat.S_ISREG(st.st_mode):
                return ArvFile(prefix, fnPattern % (pdh, branch))
            elif stat.S_ISDIR(st.st_mode):
                return ArvFile(prefix, dirPattern % (pdh, branch))
            else:
                raise Exception("%s is not a regular file or directory" % absfn)
        else:
            # trim leading '/' for path prefix test later
            return UploadFile(prefix, absfn[1:])
    except OSError as e:
        if e.errno == errno.ENOENT and not raiseOSError:
            pass
        else:
            raise

    return prefix+fn

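A sketch of the three possible outcomes for a local path (the path is hypothetical); note that UploadFile must be tested before ArvFile, since it is a subclass:

    result = statfile("-i ", "/tmp/local-input.txt")
    if isinstance(result, UploadFile):
        print("needs upload:", result.fn)       # absolute path, leading '/' trimmed
    elif isinstance(result, ArvFile):
        print("already in Keep:", result.fn)    # "$(file <pdh>/<path>)"
    else:
        print("passed through unchanged:", result)  # prefix+fn for nonexistent paths
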
def write_file(collection, pathprefix, fn, flush=False):
    with open(os.path.join(pathprefix, fn), "rb") as src:
        dst = collection.open(fn, "wb")
        r = src.read(1024*128)
        while r:
            dst.write(r)
            r = src.read(1024*128)
        dst.close(flush=flush)

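For instance, streaming one local file into a fresh collection in 128 KiB chunks (file names hypothetical; with flush=False the data is buffered until the collection is saved):

    coll = arvados.collection.Collection(api_client=arvados.api('v1'))
    # Reads /tmp/staging/report.txt and writes it into the collection
    # under the relative name "report.txt".
    write_file(coll, "/tmp/staging", "report.txt", flush=False)
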
def uploadfiles(files, api, dry_run=False, num_retries=0,
                project=None,
                fnPattern="$(file %s/%s)",
                name=None,
                collection=None,
                packed=True):
    # Find the smallest path prefix that includes all the files that need to be uploaded.
    # This starts at the root and iteratively removes common parent directory prefixes
    # until all file paths no longer have a common parent.
    if files:
        n = True
        pathprefix = "/"
        while n:
            pathstep = None
            for c in files:
                if pathstep is None:
                    sp = c.fn.split('/')
                    if len(sp) < 2:
                        # no parent directories left
                        n = False
                        break
                    # path step takes next directory
                    pathstep = sp[0] + "/"
                else:
                    # check if pathstep is common prefix for all files
                    if not c.fn.startswith(pathstep):
                        n = False
                        break
            if n:
                # pathstep is common parent directory for all files, so remove the prefix
                # from each path
                pathprefix += pathstep
                for c in files:
                    c.fn = c.fn[len(pathstep):]

        logger.info("Upload local files: \"%s\"", '" "'.join([c.fn for c in files]))

    if dry_run:
        logger.info("$(input) is %s", pathprefix.rstrip('/'))
        pdh = "$(input)"
    else:
        files = sorted(files, key=lambda x: x.fn)
        if collection is None:
            collection = arvados.collection.Collection(api_client=api, num_retries=num_retries)
        prev = ""
        for f in files:
            localpath = os.path.join(pathprefix, f.fn)
            if prev and localpath.startswith(prev+"/"):
                # If this path is inside an already uploaded subdirectory,
                # don't redundantly re-upload it.
                # e.g. we uploaded /tmp/foo and the next file is /tmp/foo/bar;
                # skip it because it starts with "/tmp/foo/"
                continue
            prev = localpath
            if os.path.isfile(localpath):
                write_file(collection, pathprefix, f.fn, not packed)
            elif os.path.isdir(localpath):
                for root, dirs, iterfiles in os.walk(localpath):
                    root = root[len(pathprefix):]
                    for src in iterfiles:
                        write_file(collection, pathprefix, os.path.join(root, src), not packed)

        pdh = None
        if len(collection) > 0:
            # non-empty collection
            filters = [["portable_data_hash", "=", collection.portable_data_hash()]]
            name_pdh = "%s (%s)" % (name, collection.portable_data_hash())
            if name:
                filters.append(["name", "=", name_pdh])
            if project:
                filters.append(["owner_uuid", "=", project])

            # Do the list/create in a loop with up to 2 tries: since we use
            # `ensure_unique_name=False`, there is a potential race with other
            # workflows that may create the collection between our list call
            # (which finds it does not exist) and our attempt to create it.
            tries = 2
            while pdh is None and tries > 0:
                exists = api.collections().list(filters=filters, limit=1).execute(num_retries=num_retries)

                if exists["items"]:
                    item = exists["items"][0]
                    pdh = item["portable_data_hash"]
                    logger.info("Using collection %s (%s)", pdh, item["uuid"])
                else:
                    try:
                        collection.save_new(name=name_pdh, owner_uuid=project, ensure_unique_name=False)
                        pdh = collection.portable_data_hash()
                        logger.info("Uploaded to %s (%s)", pdh, collection.manifest_locator())
                    except arvados.errors.ApiError:
                        tries -= 1
            if pdh is None:
                # Something weird is going on here: probably a collection
                # with a conflicting name but the wrong PDH.  We won't be
                # able to reuse it, but we still need to save our
                # collection, so save it with a unique name.
                logger.info("Name conflict on '%s', existing collection has an unexpected portable data hash", name_pdh)
                collection.save_new(name=name_pdh, owner_uuid=project, ensure_unique_name=True)
                pdh = collection.portable_data_hash()
                logger.info("Uploaded to %s (%s)", pdh, collection.manifest_locator())
        else:
            # empty collection
            pdh = collection.portable_data_hash()
            assert (pdh == config.EMPTY_BLOCK_LOCATOR), "Empty collection portable_data_hash did not have expected locator, was %s" % pdh
            logger.debug("Using empty collection %s", pdh)

    for c in files:
        c.keepref = "%s/%s" % (pdh, c.fn)
        c.fn = fnPattern % (pdh, c.fn)


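Putting it together, a hedged sketch of a typical call (the local files are hypothetical): the common prefix "/tmp/job/" is trimmed off, both files are packed into one collection, and each entry is rewritten in place:

    api = arvados.api('v1')
    files = [UploadFile("", "tmp/job/a.txt"), UploadFile("", "tmp/job/b.txt")]
    uploadfiles(files, api, num_retries=3, name="my inputs")
    for f in files:
        # f.keepref -> "<pdh>/a.txt", f.fn -> "$(file <pdh>/a.txt)"
        print(f.keepref, f.fn)
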
def main(arguments=None):
    raise Exception("Legacy arv-run removed.")

if __name__ == '__main__':
    main()