Python file_io.is_directory Function Code Examples


This article collects typical usage examples of the is_directory function from tensorflow.python.lib.io.file_io in Python. If you are wondering what is_directory does, how to call it, or what real-world uses look like, the curated examples below should help.



A total of 18 code examples of the is_directory function are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
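
Before the collected examples, here is a minimal, self-contained usage sketch of file_io.is_directory. The temporary paths and file contents are only illustrative and are not taken from any of the projects below:

import os
import tempfile

from tensorflow.python.lib.io import file_io

# Create a scratch directory and a file inside it for demonstration.
base_dir = tempfile.mkdtemp()
dir_path = os.path.join(base_dir, "demo_dir")
file_path = os.path.join(dir_path, "demo.txt")

file_io.create_dir(dir_path)                      # make the directory
file_io.write_string_to_file(file_path, "hello")  # write a small file

print(file_io.is_directory(dir_path))   # True: the path is a directory
print(file_io.is_directory(file_path))  # False: the path is a regular file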

Example 1: __init__

    def __init__(self, file_io_module=None, bucket_name=None):
        if bucket_name is None:
            bucket_name = os.environ.get(self._test_bucket_env_key, None)
        if bucket_name is None:
            # will mock gcs locally for tests
            if file_io_module is None:
                raise ValueError('`file_io_module` must be provided for mocking')
            self.mock_gcs = True
            self.file_io_module = file_io_module
            self.local_objects = {}
            self.bucket_name = 'mock-bucket'
        else:
            # will use real bucket for tests
            if bucket_name.startswith(self._gcs_prefix):
                bucket_name = bucket_name[len(self._gcs_prefix):]
            self.bucket_name = bucket_name
            if tf_file_io is None:
                raise ImportError(
                    'tensorflow must be installed to read/write to GCS')
            try:
                # check that bucket exists and is accessible
                tf_file_io.is_directory(self.bucket_path)
            except Exception:
                raise IOError(
                    'could not access provided bucket {}'.format(self.bucket_path))
            self.mock_gcs = False
            self.file_io_module = None
            self.local_objects = None

        self.patched_file_io = None
        self._is_started = False
Developer: ZhangXinNan | Project: keras | Lines: 31 | Source: test_utils.py


Example 2: testIsDirectory

 def testIsDirectory(self):
   dir_path = os.path.join(self._base_dir, "test_dir")
   # Failure for a non-existing dir.
   with self.assertRaises(errors.NotFoundError):
     file_io.is_directory(dir_path)
   file_io.create_dir(dir_path)
   self.assertTrue(file_io.is_directory(dir_path))
   file_path = os.path.join(dir_path, "test_file")
   file_io.FileIO(file_path, mode="w").write("test")
   # False for a file.
   self.assertFalse(file_io.is_directory(file_path))
Developer: JamesFysh | Project: tensorflow | Lines: 11 | Source: file_io_test.py


Example 3: testIsDirectory

 def testIsDirectory(self):
   dir_path = os.path.join(self._base_dir, "test_dir")
   # Failure for a non-existing dir.
   self.assertFalse(file_io.is_directory(dir_path))
   file_io.create_dir(dir_path)
   self.assertTrue(file_io.is_directory(dir_path))
   file_path = os.path.join(dir_path, "test_file")
   file_io.FileIO(file_path, mode="w").write("test")
   # False for a file.
   self.assertFalse(file_io.is_directory(file_path))
   # Test that the value returned from `stat()` has `is_directory` set.
   file_statistics = file_io.stat(dir_path)
   self.assertTrue(file_statistics.is_directory)
Developer: 1000sprites | Project: tensorflow | Lines: 13 | Source: file_io_test.py


Example 4: create_dir_test

def create_dir_test():
    """Verifies file_io directory handling methods ."""

    starttime = int(round(time.time() * 1000))
    dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime)
    print("Creating dir %s" % dir_name)
    file_io.create_dir(dir_name)
    elapsed = int(round(time.time() * 1000)) - starttime
    print("Created directory in: %d milliseconds" % elapsed)
    # Check that the directory exists.
    dir_exists = file_io.is_directory(dir_name)
    print("%s directory exists: %s" % (dir_name, dir_exists))

    # List contents of just created directory.
    print("Listing directory %s." % dir_name)
    starttime = int(round(time.time() * 1000))
    print(file_io.list_directory(dir_name))
    elapsed = int(round(time.time() * 1000)) - starttime
    print("Listed directory %s in %s milliseconds" % (dir_name, elapsed))

    # Delete directory.
    print("Deleting directory %s." % dir_name)
    starttime = int(round(time.time() * 1000))
    file_io.delete_recursively(dir_name)
    elapsed = int(round(time.time() * 1000)) - starttime
    print("Deleted directory %s in %s milliseconds" % (dir_name, elapsed))
Developer: paolodedios | Project: tensorflow | Lines: 26 | Source: gcs_smoke.py


Example 5: _serve_sprite_image

  def _serve_sprite_image(self, request, query_params):
    run = query_params.get('run')
    if not run:
      request.respond('query parameter "run" is required', 'text/plain', 400)
      return

    name = query_params.get('name')
    if name is None:
      request.respond('query parameter "name" is required', 'text/plain', 400)
      return

    if run not in self.configs:
      request.respond('Unknown run: %s' % run, 'text/plain', 400)
      return

    config = self.configs[run]
    embedding_info = self._get_embedding(name, config)

    if not embedding_info or not embedding_info.sprite.image_path:
      request.respond(
          'No sprite image file found for tensor %s in the config file %s' %
          (name, self.config_fpaths[run]), 'text/plain', 400)
      return

    fpath = embedding_info.sprite.image_path
    if not file_io.file_exists(fpath) or file_io.is_directory(fpath):
      request.respond(
          '%s does not exist or is a directory' % fpath, 'text/plain', 400)
      return
    f = file_io.FileIO(fpath, 'r')
    encoded_image_string = f.read()
    f.close()
    image_type = imghdr.what(None, encoded_image_string)
    mime_type = _IMGHDR_TO_MIMETYPE.get(image_type, _DEFAULT_IMAGE_MIMETYPE)
    request.respond(encoded_image_string, mime_type)
Developer: RapidApplicationDevelopment | Project: tensorflow | Lines: 35 | Source: plugin.py


Example 6: _serve_sprite_image

  def _serve_sprite_image(self, request):
    run = request.args.get('run')
    if not run:
      return Respond(request, 'query parameter "run" is required', 'text/plain',
                     400)

    name = request.args.get('name')
    if name is None:
      return Respond(request, 'query parameter "name" is required',
                     'text/plain', 400)

    if run not in self.configs:
      return Respond(request, 'Unknown run: "%s"' % run, 'text/plain', 400)

    config = self.configs[run]
    embedding_info = self._get_embedding(name, config)

    if not embedding_info or not embedding_info.sprite.image_path:
      return Respond(
          request,
          'No sprite image file found for tensor "%s" in the config file "%s"' %
          (name, self.config_fpaths[run]), 'text/plain', 400)

    fpath = os.path.expanduser(embedding_info.sprite.image_path)
    fpath = _rel_to_abs_asset_path(fpath, self.config_fpaths[run])
    if not file_io.file_exists(fpath) or file_io.is_directory(fpath):
      return Respond(request, '"%s" does not exist or is a directory' % fpath,
                     'text/plain', 400)
    f = file_io.FileIO(fpath, 'rb')
    encoded_image_string = f.read()
    f.close()
    image_type = imghdr.what(None, encoded_image_string)
    mime_type = _IMGHDR_TO_MIMETYPE.get(image_type, _DEFAULT_IMAGE_MIMETYPE)
    return Respond(request, encoded_image_string, mime_type)
Developer: chenjun0210 | Project: tensorflow | Lines: 34 | Source: projector_plugin.py


Example 7: _serve_bookmarks

  def _serve_bookmarks(self, query_params):
    run = query_params.get('run')
    if not run:
      self.handler.respond('query parameter "run" is required', 'text/plain',
                           400)
      return

    name = query_params.get('name')
    if name is None:
      self.handler.respond('query parameter "name" is required', 'text/plain',
                           400)
      return

    if run not in self.configs:
      self.handler.respond('Unknown run: %s' % run, 'text/plain', 400)
      return

    config = self.configs[run]
    fpath = self._get_bookmarks_file_for_tensor(name, config)
    if not fpath:
      self.handler.respond(
          'No bookmarks file found for tensor %s in the config file %s' %
          (name, self.config_fpaths[run]), 'text/plain', 400)
      return
    if not file_io.file_exists(fpath) or file_io.is_directory(fpath):
      self.handler.respond('%s is not a file' % fpath, 'text/plain', 400)
      return

    bookmarks_json = None
    with file_io.FileIO(fpath, 'r') as f:
      bookmarks_json = f.read()
    self.handler.respond(bookmarks_json, 'application/json')
Developer: 821760408-sp | Project: tensorflow | Lines: 32 | Source: plugin.py


Example 8: _serve_bookmarks

  def _serve_bookmarks(self, request):
    run = request.args.get('run')
    if not run:
      return Respond(request, 'query parameter "run" is required', 'text/plain',
                     400)

    name = request.args.get('name')
    if name is None:
      return Respond(request, 'query parameter "name" is required',
                     'text/plain', 400)

    if run not in self.configs:
      return Respond(request, 'Unknown run: "%s"' % run, 'text/plain', 400)

    config = self.configs[run]
    fpath = self._get_bookmarks_file_for_tensor(name, config)
    if not fpath:
      return Respond(
          request,
          'No bookmarks file found for tensor "%s" in the config file "%s"' %
          (name, self.config_fpaths[run]), 'text/plain', 400)
    fpath = _rel_to_abs_asset_path(fpath, self.config_fpaths[run])
    if not file_io.file_exists(fpath) or file_io.is_directory(fpath):
      return Respond(request, '"%s" not found, or is not a file' % fpath,
                     'text/plain', 400)

    bookmarks_json = None
    with file_io.FileIO(fpath, 'rb') as f:
      bookmarks_json = f.read()
    return Respond(request, bookmarks_json, 'application/json')
Developer: chenjun0210 | Project: tensorflow | Lines: 30 | Source: projector_plugin.py


Example 9: _serve_metadata

  def _serve_metadata(self, query_params):
    run = query_params.get('run')
    if run is None:
      self.handler.respond('query parameter "run" is required',
                           'text/plain', 400)
      return

    name = query_params.get('name')
    if name is None:
      self.handler.respond('query parameter "name" is required',
                           'text/plain', 400)
      return
    if run not in self.configs:
      self.handler.respond('Unknown run: %s' % run, 'text/plain', 400)
      return

    config = self.configs[run]
    fpath = self._get_metadata_file_for_tensor(name, config)
    if not fpath:
      self.handler.respond(
          'No metadata file found for tensor %s in the config file %s' %
          (name, self.config_fpaths[run]), 'text/plain', 400)
      return
    if not file_io.file_exists(fpath) or file_io.is_directory(fpath):
      self.handler.respond('%s is not a file' % fpath, 'text/plain', 400)
      return

    with file_io.FileIO(fpath, 'r') as f:
      lines = []
      for line in f:
        lines.append(line)
        if len(lines) >= LIMIT_NUM_POINTS:
          break
    self.handler.respond(''.join(lines), 'text/plain')
Developer: 821760408-sp | Project: tensorflow | Lines: 34 | Source: plugin.py


Example 10: recursive_copy

def recursive_copy(src_dir, dest_dir):
  """Copy the contents of src_dir into the folder dest_dir.
  Args:
    src_dir: gcs or local path.
    dest_dir: gcs or local path.
  """

  file_io.recursive_create_dir(dest_dir)
  for file_name in file_io.list_directory(src_dir):
    old_path = os.path.join(src_dir, file_name)
    new_path = os.path.join(dest_dir, file_name)

    if file_io.is_directory(old_path):
      recursive_copy(old_path, new_path)
    else:
      file_io.copy(old_path, new_path, overwrite=True)
Developer: javiervicho | Project: pydatalab | Lines: 16 | Source: task.py


Example 11: _serve_metadata

  def _serve_metadata(self, request):
    run = request.args.get('run')
    if run is None:
      return Respond(request, 'query parameter "run" is required', 'text/plain',
                     400)

    name = request.args.get('name')
    if name is None:
      return Respond(request, 'query parameter "name" is required',
                     'text/plain', 400)

    num_rows = _parse_positive_int_param(request, 'num_rows')
    if num_rows == -1:
      return Respond(request, 'query parameter num_rows must be integer > 0',
                     'text/plain', 400)

    if run not in self.configs:
      return Respond(request, 'Unknown run: "%s"' % run, 'text/plain', 400)

    config = self.configs[run]
    fpath = self._get_metadata_file_for_tensor(name, config)
    if not fpath:
      return Respond(
          request,
          'No metadata file found for tensor "%s" in the config file "%s"' %
          (name, self.config_fpaths[run]), 'text/plain', 400)
    fpath = _rel_to_abs_asset_path(fpath, self.config_fpaths[run])
    if not file_io.file_exists(fpath) or file_io.is_directory(fpath):
      return Respond(request, '"%s" not found, or is not a file' % fpath,
                     'text/plain', 400)

    num_header_rows = 0
    with file_io.FileIO(fpath, 'r') as f:
      lines = []
      # Stream reading the file with early break in case the file doesn't fit in
      # memory.
      for line in f:
        lines.append(line)
        if len(lines) == 1 and '\t' in lines[0]:
          num_header_rows = 1
        if num_rows and len(lines) >= num_rows + num_header_rows:
          break
    return Respond(request, ''.join(lines), 'text/plain')
Developer: chenjun0210 | Project: tensorflow | Lines: 43 | Source: projector_plugin.py


Example 12: _recursive_copy

def _recursive_copy(src_dir, dest_dir):
  """Copy the contents of src_dir into the folder dest_dir.
  Args:
    src_dir: gcs or local path.
    dest_dir: gcs or local path.
  When called, dest_dir should exist.
  """
  src_dir = python_portable_string(src_dir)
  dest_dir = python_portable_string(dest_dir)

  file_io.recursive_create_dir(dest_dir)
  for file_name in file_io.list_directory(src_dir):
    old_path = os.path.join(src_dir, file_name)
    new_path = os.path.join(dest_dir, file_name)

    if file_io.is_directory(old_path):
      _recursive_copy(old_path, new_path)
    else:
      file_io.copy(old_path, new_path, overwrite=True)
Developer: googledatalab | Project: pydatalab | Lines: 19 | Source: util.py


Example 13: _serve_metadata

  def _serve_metadata(self, request, query_params):
    run = query_params.get('run')
    if run is None:
      request.respond('query parameter "run" is required', 'text/plain', 400)
      return

    name = query_params.get('name')
    if name is None:
      request.respond('query parameter "name" is required', 'text/plain', 400)
      return

    num_rows = _parse_positive_int_param(request, query_params, 'num_rows')
    if num_rows == -1:
      return

    if run not in self.configs:
      request.respond('Unknown run: %s' % run, 'text/plain', 400)
      return

    config = self.configs[run]
    fpath = self._get_metadata_file_for_tensor(name, config)
    if not fpath:
      request.respond(
          'No metadata file found for tensor %s in the config file %s' %
          (name, self.config_fpaths[run]), 'text/plain', 400)
      return
    if not file_io.file_exists(fpath) or file_io.is_directory(fpath):
      request.respond('%s is not a file' % fpath, 'text/plain', 400)
      return

    num_header_rows = 0
    with file_io.FileIO(fpath, 'r') as f:
      lines = []
      # Stream reading the file with early break in case the file doesn't fit in
      # memory.
      for line in f:
        lines.append(line)
        if len(lines) == 1 and '\t' in lines[0]:
          num_header_rows = 1
        if num_rows and len(lines) >= num_rows + num_header_rows:
          break
    request.respond(''.join(lines), 'text/plain')
Developer: RapidApplicationDevelopment | Project: tensorflow | Lines: 42 | Source: plugin.py


Example 14: load_library

def load_library(library_location):
  """Loads a TensorFlow plugin.

  "library_location" can be a path to a specific shared object, or a folder.
  If it is a folder, all shared objects that are named "libtfkernel*" will be
  loaded. When the library is loaded, kernels registered in the library via the
  `REGISTER_*` macros are made available in the TensorFlow process.

  Args:
    library_location: Path to the plugin or the folder of plugins.
      Relative or absolute filesystem path to a dynamic library file or folder.

  Returns:
    None

  Raises:
    OSError: When the file to be loaded is not found.
    RuntimeError: when unable to load the library.
  """
  if file_io.file_exists(library_location):
    if file_io.is_directory(library_location):
      directory_contents = file_io.list_directory(library_location)

      kernel_libraries = [
          os.path.join(library_location, f) for f in directory_contents
          if _is_shared_object(f)]
    else:
      kernel_libraries = [library_location]

    for lib in kernel_libraries:
      py_tf.TF_LoadLibrary(lib)

  else:
    raise OSError(
        errno.ENOENT,
        'The file or folder to load kernel libraries from does not exist.',
        library_location)
Developer: JonathanRaiman | Project: tensorflow | Lines: 37 | Source: load_library.py


Example 15: export_fn

  def export_fn(estimator, export_dir_base, checkpoint_path=None, eval_result=None):
    with ops.Graph().as_default() as g:
      contrib_variables.create_global_step(g)

      input_ops = feature_transforms.build_csv_serving_tensors_for_training_step(
          args.analysis, features, schema, stats, keep_target)
      model_fn_ops = estimator._call_model_fn(input_ops.features,
                                              None,
                                              model_fn_lib.ModeKeys.INFER)
      output_fetch_tensors = make_prediction_output_tensors(
          args=args,
          features=features,
          input_ops=input_ops,
          model_fn_ops=model_fn_ops,
          keep_target=keep_target)

      # Don't use signature_def_utils.predict_signature_def as that renames
      # tensor names if there is only 1 input/output tensor!
      signature_inputs = {key: tf.saved_model.utils.build_tensor_info(tensor)
                          for key, tensor in six.iteritems(input_ops.default_inputs)}
      signature_outputs = {key: tf.saved_model.utils.build_tensor_info(tensor)
                           for key, tensor in six.iteritems(output_fetch_tensors)}
      signature_def_map = {
          'serving_default':
              signature_def_utils.build_signature_def(
                  signature_inputs,
                  signature_outputs,
                  tf.saved_model.signature_constants.PREDICT_METHOD_NAME)}

      if not checkpoint_path:
        # Locate the latest checkpoint
        checkpoint_path = saver.latest_checkpoint(estimator._model_dir)
      if not checkpoint_path:
        raise ValueError("Couldn't find trained model at %s."
                         % estimator._model_dir)

      export_dir = saved_model_export_utils.get_timestamped_export_dir(
          export_dir_base)

      if (model_fn_ops.scaffold is not None and
         model_fn_ops.scaffold.saver is not None):
        saver_for_restore = model_fn_ops.scaffold.saver
      else:
        saver_for_restore = saver.Saver(sharded=True)

      with tf_session.Session('') as session:
        saver_for_restore.restore(session, checkpoint_path)
        init_op = control_flow_ops.group(
            variables.local_variables_initializer(),
            resources.initialize_resources(resources.shared_resources()),
            tf.tables_initializer())

        # Perform the export
        builder = saved_model_builder.SavedModelBuilder(export_dir)
        builder.add_meta_graph_and_variables(
            session, [tag_constants.SERVING],
            signature_def_map=signature_def_map,
            assets_collection=ops.get_collection(
                ops.GraphKeys.ASSET_FILEPATHS),
            legacy_init_op=init_op)
        builder.save(False)

      # Add the extra assets
      if assets_extra:
        assets_extra_path = os.path.join(compat.as_bytes(export_dir),
                                         compat.as_bytes('assets.extra'))
        for dest_relative, source in assets_extra.items():
          dest_absolute = os.path.join(compat.as_bytes(assets_extra_path),
                                       compat.as_bytes(dest_relative))
          dest_path = os.path.dirname(dest_absolute)
          file_io.recursive_create_dir(dest_path)
          file_io.copy(source, dest_absolute)

    # only keep the last 3 models
    saved_model_export_utils.garbage_collect_exports(
        export_dir_base,
        exports_to_keep=3)

    # save the last model to the model folder.
    # export_dir_base = A/B/intermediate_models/
    if keep_target:
      final_dir = os.path.join(args.job_dir, 'evaluation_model')
    else:
      final_dir = os.path.join(args.job_dir, 'model')
    if file_io.is_directory(final_dir):
      file_io.delete_recursively(final_dir)
    file_io.recursive_create_dir(final_dir)
    recursive_copy(export_dir, final_dir)

    return export_dir
Developer: javiervicho | Project: pydatalab | Lines: 90 | Source: task.py


Example 16: export_fn

  def export_fn(estimator, export_dir_base, checkpoint_path=None, eval_result=None):
    with ops.Graph().as_default() as g:
      contrib_variables.create_global_step(g)

      input_ops = serving_from_csv_input(train_config, args, keep_target)
      model_fn_ops = estimator._call_model_fn(input_ops.features,
                                              None,
                                              model_fn_lib.ModeKeys.INFER)
      output_fetch_tensors = make_output_tensors(
          train_config=train_config,
          args=args,
          input_ops=input_ops,
          model_fn_ops=model_fn_ops,
          keep_target=keep_target)

      signature_def_map = {
        'serving_default': signature_def_utils.predict_signature_def(input_ops.default_inputs,
                                                                     output_fetch_tensors)
      }

      if not checkpoint_path:
        # Locate the latest checkpoint
        checkpoint_path = saver.latest_checkpoint(estimator._model_dir)
      if not checkpoint_path:
        raise NotFittedError("Couldn't find trained model at %s."
                             % estimator._model_dir)

      export_dir = saved_model_export_utils.get_timestamped_export_dir(
          export_dir_base)

      with tf_session.Session('') as session:
        # variables.initialize_local_variables()
        variables.local_variables_initializer()
        data_flow_ops.tables_initializer()
        saver_for_restore = saver.Saver(
            variables.global_variables(),
            sharded=True)
        saver_for_restore.restore(session, checkpoint_path)

        init_op = control_flow_ops.group(
            variables.local_variables_initializer(),
            data_flow_ops.tables_initializer())

        # Perform the export
        builder = saved_model_builder.SavedModelBuilder(export_dir)
        builder.add_meta_graph_and_variables(
            session, [tag_constants.SERVING],
            signature_def_map=signature_def_map,
            assets_collection=ops.get_collection(
                ops.GraphKeys.ASSET_FILEPATHS),
            legacy_init_op=init_op)
        builder.save(False)

      # Add the extra assets
      if assets_extra:
        assets_extra_path = os.path.join(compat.as_bytes(export_dir),
                                         compat.as_bytes('assets.extra'))
        for dest_relative, source in assets_extra.items():
          dest_absolute = os.path.join(compat.as_bytes(assets_extra_path),
                                       compat.as_bytes(dest_relative))
          dest_path = os.path.dirname(dest_absolute)
          gfile.MakeDirs(dest_path)
          gfile.Copy(source, dest_absolute)

    # only keep the last 3 models
    saved_model_export_utils.garbage_collect_exports(
        python_portable_string(export_dir_base),
        exports_to_keep=3)

    # save the last model to the model folder.
    # export_dir_base = A/B/intermediate_models/
    if keep_target:
      final_dir = os.path.join(args.job_dir, 'evaluation_model')
    else:
      final_dir = os.path.join(args.job_dir, 'model')
    if file_io.is_directory(final_dir):
      file_io.delete_recursively(final_dir)
    file_io.recursive_create_dir(final_dir)
    _recursive_copy(export_dir, final_dir)

    return export_dir
Developer: parthea | Project: pydatalab | Lines: 81 | Source: util.py


Example 17: create_dir_test

def create_dir_test():
  """Verifies file_io directory handling methods."""

  # Test directory creation.
  starttime_ms = int(round(time.time() * 1000))
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime_ms)
  print("Creating dir %s" % dir_name)
  file_io.create_dir(dir_name)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  print("Created directory in: %d milliseconds" % elapsed_ms)

  # Check that the directory exists.
  dir_exists = file_io.is_directory(dir_name)
  assert dir_exists
  print("%s directory exists: %s" % (dir_name, dir_exists))

  # Test recursive directory creation.
  starttime_ms = int(round(time.time() * 1000))
  recursive_dir_name = "%s/%s/%s" % (dir_name,
                                     "nested_dir1",
                                     "nested_dir2")
  print("Creating recursive dir %s" % recursive_dir_name)
  file_io.recursive_create_dir(recursive_dir_name)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  print("Created directory recursively in: %d milliseconds" % elapsed_ms)

  # Check that the directory exists.
  recursive_dir_exists = file_io.is_directory(recursive_dir_name)
  assert recursive_dir_exists
  print("%s directory exists: %s" % (recursive_dir_name, recursive_dir_exists))

  # Create some contents in the just created directory and list the contents.
  num_files = 10
  files_to_create = ["file_%d.txt" % n for n in range(num_files)]
  for file_num in files_to_create:
    file_name = "%s/%s" % (dir_name, file_num)
    print("Creating file %s." % file_name)
    file_io.write_string_to_file(file_name, "test file.")

  print("Listing directory %s." % dir_name)
  starttime_ms = int(round(time.time() * 1000))
  directory_contents = file_io.list_directory(dir_name)
  print(directory_contents)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  print("Listed directory %s in %s milliseconds" % (dir_name, elapsed_ms))
  assert set(directory_contents) == set(files_to_create + ["nested_dir1/"])

  # Test directory renaming.
  dir_to_rename = "%s/old_dir" % dir_name
  new_dir_name = "%s/new_dir" % dir_name
  file_io.create_dir(dir_to_rename)
  assert file_io.is_directory(dir_to_rename)
  assert not file_io.is_directory(new_dir_name)

  starttime_ms = int(round(time.time() * 1000))
  print("Will try renaming directory %s to %s" % (dir_to_rename, new_dir_name))
  file_io.rename(dir_to_rename, new_dir_name)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  print("Renamed directory %s to %s in %s milliseconds" % (
      dir_to_rename, new_dir_name, elapsed_ms))
  assert not file_io.is_directory(dir_to_rename)
  assert file_io.is_directory(new_dir_name)

  # Test Delete directory recursively.
  print("Deleting directory recursively %s." % dir_name)
  starttime_ms = int(round(time.time() * 1000))
  file_io.delete_recursively(dir_name)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  dir_exists = file_io.is_directory(dir_name)
  assert not dir_exists
  print("Deleted directory recursively %s in %s milliseconds" % (
      dir_name, elapsed_ms))
Developer: DILASSS | Project: tensorflow | Lines: 72 | Source: gcs_smoke.py


Example 18: is_directory

 def is_directory(cls, dirname):
     return file_io.is_directory(dirname)
Developer: idil77soltahanov | Project: hugin-1 | Lines: 2 | Source: IOUtils.py



Note: The tensorflow.python.lib.io.file_io.is_directory examples in this article were compiled by 纯净天空 from source-code and documentation hosting platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective developers; copyright of the source code belongs to the original authors. Please consult the License of the corresponding project before distributing or using the code, and do not reproduce this article without permission.

