
Python file_io.delete_recursively Function Code Examples


This article collects typical usage examples of the Python function tensorflow.python.lib.io.file_io.delete_recursively. If you are asking yourself what delete_recursively does, how to call it, or where to find working examples, the curated code samples below should help.



The following presents 16 code examples of the delete_recursively function, sorted by popularity by default.
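Before the examples, here is a minimal usage sketch (the directory path below is hypothetical, chosen only for illustration). delete_recursively removes a directory and everything beneath it; as Example 9 below shows, it raises errors.NotFoundError when the path does not exist.

  from tensorflow.python.lib.io import file_io

  tmp_dir = "/tmp/delete_recursively_demo"  # hypothetical scratch path
  file_io.recursive_create_dir(tmp_dir + "/nested")
  file_io.write_string_to_file(tmp_dir + "/nested/a.txt", "test file.")
  file_io.delete_recursively(tmp_dir)  # removes tmp_dir and all of its contents
  assert not file_io.is_directory(tmp_dir)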

Example 1: testBadSavedModelFileFormat

  def testBadSavedModelFileFormat(self):
    export_dir = self._get_export_dir("test_bad_saved_model_file_format")
    # Attempt to load a SavedModel from an export directory that does not exist.
    with self.test_session(graph=ops.Graph()) as sess:
      with self.assertRaisesRegexp(IOError,
                                   "SavedModel file does not exist at: %s" %
                                   export_dir):
        loader.load(sess, ["foo"], export_dir)

    os.makedirs(export_dir)
    # Write an invalid binary proto to saved_model.pb.
    path_to_pb = os.path.join(export_dir, constants.SAVED_MODEL_FILENAME_PB)
    with open(path_to_pb, "w") as f:
      f.write("invalid content")
    with self.test_session(graph=ops.Graph()) as sess:
      with self.assertRaisesRegexp(IOError, "Cannot parse file.*%s" %
                                   constants.SAVED_MODEL_FILENAME_PB):
        loader.load(sess, ["foo"], export_dir)

    # Cleanup the directory and start again.
    file_io.delete_recursively(export_dir)

    os.makedirs(export_dir)
    # Write an invalid text proto to saved_model.pbtxt
    path_to_pbtxt = os.path.join(export_dir,
                                 constants.SAVED_MODEL_FILENAME_PBTXT)
    with open(path_to_pbtxt, "w") as f:
      f.write("invalid content")
    with self.test_session(graph=ops.Graph()) as sess:
      with self.assertRaisesRegexp(IOError, "Cannot parse file.*%s" %
                                   constants.SAVED_MODEL_FILENAME_PBTXT):
        loader.load(sess, ["foo"], export_dir)
Author: KiaraStarlab, Project: tensorflow, Lines: 32, Source: saved_model_test.py


Example 2: create_dir_test

def create_dir_test():
    """Verifies file_io directory handling methods ."""

    starttime = int(round(time.time() * 1000))
    dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime)
    print("Creating dir %s" % dir_name)
    file_io.create_dir(dir_name)
    elapsed = int(round(time.time() * 1000)) - starttime
    print("Created directory in: %d milliseconds" % elapsed)
    # Check that the directory exists.
    dir_exists = file_io.is_directory(dir_name)
    print("%s directory exists: %s" % (dir_name, dir_exists))

    # List contents of just created directory.
    print("Listing directory %s." % dir_name)
    starttime = int(round(time.time() * 1000))
    print(file_io.list_directory(dir_name))
    elapsed = int(round(time.time() * 1000)) - starttime
    print("Listed directory %s in %s milliseconds" % (dir_name, elapsed))

    # Delete directory.
    print("Deleting directory %s." % dir_name)
    starttime = int(round(time.time() * 1000))
    file_io.delete_recursively(dir_name)
    elapsed = int(round(time.time() * 1000)) - starttime
    print("Deleted directory %s in %s milliseconds" % (dir_name, elapsed))
Author: paolodedios, Project: tensorflow, Lines: 26, Source: gcs_smoke.py
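Examples 2, 12, and 13 below all repeat the int(round(time.time() * 1000)) idiom to measure elapsed milliseconds. A small helper like the sketch below (the name timed_ms is invented here, not part of file_io) avoids copying that pattern around:

  import time

  def timed_ms(fn, *args, **kwargs):
    # Run fn(*args, **kwargs) and return (result, elapsed milliseconds).
    start_ms = int(round(time.time() * 1000))
    result = fn(*args, **kwargs)
    return result, int(round(time.time() * 1000)) - start_ms

  # Usage against the example above:
  # contents, elapsed = timed_ms(file_io.list_directory, dir_name)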


Example 3: create_object_test

def create_object_test():
  """Verifies file_io's object manipulation methods ."""
  starttime = int(round(time.time() * 1000))
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime)
  print("Creating dir %s." % dir_name)
  file_io.create_dir(dir_name)

  # Create a file in this directory.
  file_name = "%s/test_file.txt" % dir_name
  print("Creating file %s." % file_name)
  file_io.write_string_to_file(file_name, "test file creation.")

  list_files_pattern = "%s/test_file*.txt" % dir_name
  print("Getting files matching pattern %s." % list_files_pattern)
  files_list = file_io.get_matching_files(list_files_pattern)
  print(files_list)

  assert len(files_list) == 1
  assert files_list[0] == file_name

  # Cleanup test files.
  print("Deleting file %s." % file_name)
  file_io.delete_file(file_name)

  # Delete directory.
  print("Deleting directory %s." % dir_name)
  file_io.delete_recursively(dir_name)
Author: AutumnQYN, Project: tensorflow, Lines: 27, Source: gcs_smoke.py


Example 4: testCreateRecursiveDir

  def testCreateRecursiveDir(self):
    dir_path = os.path.join(self._base_dir, "temp_dir/temp_dir1/temp_dir2")
    file_io.recursive_create_dir(dir_path)
    file_path = os.path.join(dir_path, "temp_file")
    file_io.FileIO(file_path, mode="w").write("testing")
    self.assertTrue(file_io.file_exists(file_path))
    file_io.delete_recursively(os.path.join(self._base_dir, "temp_dir"))
    self.assertFalse(file_io.file_exists(file_path))
Author: JamesFysh, Project: tensorflow, Lines: 8, Source: file_io_test.py


Example 5: _analyze

def _analyze(args, cell):
  # For now, always run python2. If needed, we can run python3 when the
  # current kernel is py3. Since our transform cannot currently run on py3
  # anyway, it is simpler to run everything with python2.
  cmd_args = ['python', 'analyze.py', '--output', _abs_path(args['output'])]
  if args['cloud']:
    cmd_args.append('--cloud')

  training_data = args['training_data']
  if args['cloud']:
    tmpdir = os.path.join(args['output'], 'tmp')
  else:
    tmpdir = tempfile.mkdtemp()

  try:
    if isinstance(training_data, dict):
      if 'csv' in training_data and 'schema' in training_data:
        schema = training_data['schema']
        schema_file = _create_json_file(tmpdir, schema, 'schema.json')
        cmd_args.append('--csv=' + _abs_path(training_data['csv']))
        cmd_args.extend(['--schema', schema_file])
      elif 'bigquery_table' in training_data:
        cmd_args.extend(['--bigquery', training_data['bigquery_table']])
      elif 'bigquery_sql' in training_data:
        # see https://cloud.google.com/bigquery/querying-data#temporary_and_permanent_tables
        print('Creating temporary table that will be deleted in 24 hours')
        r = bq.Query(training_data['bigquery_sql']).execute().result()
        cmd_args.extend(['--bigquery', r.full_name])
      else:
        raise ValueError('Invalid training_data dict. '
                         'Requires either "csv" and "schema", '
                         '"bigquery_table", or "bigquery_sql".')
    elif isinstance(training_data, google.datalab.ml.CsvDataSet):
      schema_file = _create_json_file(tmpdir, training_data.schema, 'schema.json')
      for file_name in training_data.input_files:
        cmd_args.append('--csv=' + _abs_path(file_name))

      cmd_args.extend(['--schema', schema_file])
    elif isinstance(training_data, google.datalab.ml.BigQueryDataSet):
      # TODO: Support query too once command line supports query.
      cmd_args.extend(['--bigquery', training_data.table])
    else:
      raise ValueError('Invalid training data. Requires either a dict, '
                       'a google.datalab.ml.CsvDataSet, or a google.datalab.ml.BigQueryDataSet.')

    features = args['features']
    features_file = _create_json_file(tmpdir, features, 'features.json')
    cmd_args.extend(['--features', features_file])

    if args['package']:
      code_path = os.path.join(tmpdir, 'package')
      _archive.extract_archive(args['package'], code_path)
    else:
      code_path = MLTOOLBOX_CODE_PATH

    _shell_process.run_and_monitor(cmd_args, os.getpid(), cwd=code_path)
  finally:
    file_io.delete_recursively(tmpdir)
Author: javiervicho, Project: pydatalab, Lines: 57, Source: _ml.py
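Example 5's shape — create a scratch directory, do the work, and delete it recursively in a finally block — guarantees cleanup even when the command fails. Reduced to a skeleton (work_fn is a hypothetical callable standing in for the analysis step):

  import tempfile

  from tensorflow.python.lib.io import file_io

  def run_with_scratch_dir(work_fn):
    tmpdir = tempfile.mkdtemp()
    try:
      # Do whatever needs the scratch directory.
      return work_fn(tmpdir)
    finally:
      # Cleanup happens on success and on failure alike.
      file_io.delete_recursively(tmpdir)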


Example 6: testGetMatchingFiles

  def testGetMatchingFiles(self):
    dir_path = os.path.join(self._base_dir, "temp_dir")
    file_io.create_dir(dir_path)
    files = ["file1.txt", "file2.txt", "file3.txt"]
    for name in files:
      file_path = os.path.join(dir_path, name)
      file_io.FileIO(file_path, mode="w").write("testing")
    expected_match = [os.path.join(dir_path, name) for name in files]
    self.assertItemsEqual(
        file_io.get_matching_files(os.path.join(dir_path, "file*.txt")),
        expected_match)
    file_io.delete_recursively(dir_path)
    self.assertFalse(file_io.file_exists(os.path.join(dir_path, "file3.txt")))
Author: pronobis, Project: tensorflow, Lines: 11, Source: file_io_test.py


Example 7: tearDownModule

def tearDownModule():
  file_io.delete_recursively(test.get_temp_dir())
Author: Ajaycs99, Project: tensorflow, Lines: 2, Source: reader_test.py


Example 8: tearDown

  def tearDown(self):
    file_io.delete_recursively(self._base_dir)
Author: 1000sprites, Project: tensorflow, Lines: 2, Source: file_io_test.py


Example 9: testDeleteRecursivelyFail

  def testDeleteRecursivelyFail(self):
    fake_dir_path = os.path.join(self._base_dir, "temp_dir")
    with self.assertRaises(errors.NotFoundError):
      file_io.delete_recursively(fake_dir_path)
Author: 1000sprites, Project: tensorflow, Lines: 4, Source: file_io_test.py
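As Example 9 demonstrates, deleting a path that does not exist raises errors.NotFoundError. When a directory may or may not be present, a defensive sketch like the following (the helper names are invented here; Example 10 below uses the same is_directory check before deleting) avoids the exception:

  from tensorflow.python.framework import errors
  from tensorflow.python.lib.io import file_io

  def delete_if_present(dirname):
    # Only attempt the recursive delete when the directory exists.
    if file_io.is_directory(dirname):
      file_io.delete_recursively(dirname)

  def delete_ignoring_missing(dirname):
    # Or swallow the error raised for a missing path.
    try:
      file_io.delete_recursively(dirname)
    except errors.NotFoundError:
      pass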


Example 10: export_fn

  def export_fn(estimator, export_dir_base, checkpoint_path=None, eval_result=None):
    with ops.Graph().as_default() as g:
      contrib_variables.create_global_step(g)

      input_ops = feature_transforms.build_csv_serving_tensors_for_training_step(
          args.analysis, features, schema, stats, keep_target)
      model_fn_ops = estimator._call_model_fn(input_ops.features,
                                              None,
                                              model_fn_lib.ModeKeys.INFER)
      output_fetch_tensors = make_prediction_output_tensors(
          args=args,
          features=features,
          input_ops=input_ops,
          model_fn_ops=model_fn_ops,
          keep_target=keep_target)

      # Don't use signature_def_utils.predict_signature_def as that renames
      # tensor names if there is only 1 input/output tensor!
      signature_inputs = {key: tf.saved_model.utils.build_tensor_info(tensor)
                          for key, tensor in six.iteritems(input_ops.default_inputs)}
      signature_outputs = {key: tf.saved_model.utils.build_tensor_info(tensor)
                           for key, tensor in six.iteritems(output_fetch_tensors)}
      signature_def_map = {
          'serving_default':
              signature_def_utils.build_signature_def(
                  signature_inputs,
                  signature_outputs,
                  tf.saved_model.signature_constants.PREDICT_METHOD_NAME)}

      if not checkpoint_path:
        # Locate the latest checkpoint
        checkpoint_path = saver.latest_checkpoint(estimator._model_dir)
      if not checkpoint_path:
        raise ValueError("Couldn't find trained model at %s."
                         % estimator._model_dir)

      export_dir = saved_model_export_utils.get_timestamped_export_dir(
          export_dir_base)

      if (model_fn_ops.scaffold is not None and
         model_fn_ops.scaffold.saver is not None):
        saver_for_restore = model_fn_ops.scaffold.saver
      else:
        saver_for_restore = saver.Saver(sharded=True)

      with tf_session.Session('') as session:
        saver_for_restore.restore(session, checkpoint_path)
        init_op = control_flow_ops.group(
            variables.local_variables_initializer(),
            resources.initialize_resources(resources.shared_resources()),
            tf.tables_initializer())

        # Perform the export
        builder = saved_model_builder.SavedModelBuilder(export_dir)
        builder.add_meta_graph_and_variables(
            session, [tag_constants.SERVING],
            signature_def_map=signature_def_map,
            assets_collection=ops.get_collection(
                ops.GraphKeys.ASSET_FILEPATHS),
            legacy_init_op=init_op)
        builder.save(False)

      # Add the extra assets
      if assets_extra:
        assets_extra_path = os.path.join(compat.as_bytes(export_dir),
                                         compat.as_bytes('assets.extra'))
        for dest_relative, source in assets_extra.items():
          dest_absolute = os.path.join(compat.as_bytes(assets_extra_path),
                                       compat.as_bytes(dest_relative))
          dest_path = os.path.dirname(dest_absolute)
          file_io.recursive_create_dir(dest_path)
          file_io.copy(source, dest_absolute)

    # only keep the last 3 models
    saved_model_export_utils.garbage_collect_exports(
        export_dir_base,
        exports_to_keep=3)

    # save the last model to the model folder.
    # export_dir_base = A/B/intermediate_models/
    if keep_target:
      final_dir = os.path.join(args.job_dir, 'evaluation_model')
    else:
      final_dir = os.path.join(args.job_dir, 'model')
    if file_io.is_directory(final_dir):
      file_io.delete_recursively(final_dir)
    file_io.recursive_create_dir(final_dir)
    recursive_copy(export_dir, final_dir)

    return export_dir
Author: javiervicho, Project: pydatalab, Lines: 90, Source: task.py


Example 11: delete_temp_dir

def delete_temp_dir(dirname=temp_dir):
  try:
    file_io.delete_recursively(dirname)
  except errors.OpError as e:
    logging.error('Error removing %s: %s', dirname, e)
Author: LUTAN, Project: tensorflow, Lines: 5, Source: googletest.py
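In its original googletest.py context, a cleanup closure like this is registered so it runs when the test process exits. A minimal sketch of that pattern (the temp_dir value here is hypothetical; the real one is created by the test harness):

  import atexit
  import logging

  from tensorflow.python.framework import errors
  from tensorflow.python.lib.io import file_io

  temp_dir = "/tmp/test_scratch"  # hypothetical; normally created by the harness

  def delete_temp_dir(dirname=temp_dir):
    try:
      file_io.delete_recursively(dirname)
    except errors.OpError as e:
      logging.error('Error removing %s: %s', dirname, e)

  atexit.register(delete_temp_dir)  # run the cleanup at interpreter exit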


Example 12: create_dir_test

def create_dir_test():
  """Verifies file_io directory handling methods."""

  # Test directory creation.
  starttime_ms = int(round(time.time() * 1000))
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime_ms)
  print("Creating dir %s" % dir_name)
  file_io.create_dir(dir_name)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  print("Created directory in: %d milliseconds" % elapsed_ms)

  # Check that the directory exists.
  dir_exists = file_io.is_directory(dir_name)
  assert dir_exists
  print("%s directory exists: %s" % (dir_name, dir_exists))

  # Test recursive directory creation.
  starttime_ms = int(round(time.time() * 1000))
  recursive_dir_name = "%s/%s/%s" % (dir_name,
                                     "nested_dir1",
                                     "nested_dir2")
  print("Creating recursive dir %s" % recursive_dir_name)
  file_io.recursive_create_dir(recursive_dir_name)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  print("Created directory recursively in: %d milliseconds" % elapsed_ms)

  # Check that the directory exists.
  recursive_dir_exists = file_io.is_directory(recursive_dir_name)
  assert recursive_dir_exists
  print("%s directory exists: %s" % (recursive_dir_name, recursive_dir_exists))

  # Create some contents in the just created directory and list the contents.
  num_files = 10
  files_to_create = ["file_%d.txt" % n for n in range(num_files)]
  for file_num in files_to_create:
    file_name = "%s/%s" % (dir_name, file_num)
    print("Creating file %s." % file_name)
    file_io.write_string_to_file(file_name, "test file.")

  print("Listing directory %s." % dir_name)
  starttime_ms = int(round(time.time() * 1000))
  directory_contents = file_io.list_directory(dir_name)
  print(directory_contents)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  print("Listed directory %s in %s milliseconds" % (dir_name, elapsed_ms))
  assert set(directory_contents) == set(files_to_create + ["nested_dir1/"])

  # Test directory renaming.
  dir_to_rename = "%s/old_dir" % dir_name
  new_dir_name = "%s/new_dir" % dir_name
  file_io.create_dir(dir_to_rename)
  assert file_io.is_directory(dir_to_rename)
  assert not file_io.is_directory(new_dir_name)

  starttime_ms = int(round(time.time() * 1000))
  print("Will try renaming directory %s to %s" % (dir_to_rename, new_dir_name))
  file_io.rename(dir_to_rename, new_dir_name)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  print("Renamed directory %s to %s in %s milliseconds" % (
      dir_to_rename, new_dir_name, elapsed_ms))
  assert not file_io.is_directory(dir_to_rename)
  assert file_io.is_directory(new_dir_name)

  # Test Delete directory recursively.
  print("Deleting directory recursively %s." % dir_name)
  starttime_ms = int(round(time.time() * 1000))
  file_io.delete_recursively(dir_name)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  dir_exists = file_io.is_directory(dir_name)
  assert not dir_exists
  print("Deleted directory recursively %s in %s milliseconds" % (
      dir_name, elapsed_ms))
Author: DILASSS, Project: tensorflow, Lines: 72, Source: gcs_smoke.py


Example 13: create_object_test

def create_object_test():
  """Verifies file_io's object manipulation methods ."""
  starttime_ms = int(round(time.time() * 1000))
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime_ms)
  print("Creating dir %s." % dir_name)
  file_io.create_dir(dir_name)

  num_files = 5
  # Create files of 2 different patterns in this directory.
  files_pattern_1 = ["%s/test_file_%d.txt" % (dir_name, n)
                     for n in range(num_files)]
  files_pattern_2 = ["%s/testfile%d.txt" % (dir_name, n)
                     for n in range(num_files)]

  starttime_ms = int(round(time.time() * 1000))
  files_to_create = files_pattern_1 + files_pattern_2
  for file_name in files_to_create:
    print("Creating file %s." % file_name)
    file_io.write_string_to_file(file_name, "test file creation.")
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  print("Created %d files in %s milliseconds" %
        (len(files_to_create), elapsed_ms))

  # Listing files of pattern1.
  list_files_pattern = "%s/test_file*.txt" % dir_name
  print("Getting files matching pattern %s." % list_files_pattern)
  starttime_ms = int(round(time.time() * 1000))
  files_list = file_io.get_matching_files(list_files_pattern)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  print("Listed files in %s milliseconds" % elapsed_ms)
  print(files_list)
  assert set(files_list) == set(files_pattern_1)

  # Listing files of pattern2.
  list_files_pattern = "%s/testfile*.txt" % dir_name
  print("Getting files matching pattern %s." % list_files_pattern)
  starttime_ms = int(round(time.time() * 1000))
  files_list = file_io.get_matching_files(list_files_pattern)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  print("Listed files in %s milliseconds" % elapsed_ms)
  print(files_list)
  assert set(files_list) == set(files_pattern_2)

  # Test renaming file.
  file_to_rename = "%s/oldname.txt" % dir_name
  file_new_name = "%s/newname.txt" % dir_name
  file_io.write_string_to_file(file_to_rename, "test file.")
  assert file_io.file_exists(file_to_rename)
  assert not file_io.file_exists(file_new_name)

  print("Will try renaming file %s to %s" % (file_to_rename, file_new_name))
  starttime_ms = int(round(time.time() * 1000))
  file_io.rename(file_to_rename, file_new_name)
  elapsed_ms = int(round(time.time() * 1000)) - starttime_ms
  print("File %s renamed to %s in %s milliseconds" % (
      file_to_rename, file_new_name, elapsed_ms))
  assert not file_io.file_exists(file_to_rename)
  assert file_io.file_exists(file_new_name)

  # Delete directory.
  print("Deleting directory %s." % dir_name)
  file_io.delete_recursively(dir_name)
Author: DILASSS, Project: tensorflow, Lines: 62, Source: gcs_smoke.py


Example 14: delete_recursively

  @classmethod
  def delete_recursively(cls, dirname):
    file_io.delete_recursively(dirname)
Author: idil77soltahanov, Project: hugin-1, Lines: 2, Source: IOUtils.py


Example 15: tearDown

  def tearDown(self):
    file_io.delete_recursively(test.get_temp_dir())
Author: JonathanRaiman, Project: tensorflow, Lines: 2, Source: loader_test.py


Example 16: export_fn

  def export_fn(estimator, export_dir_base, checkpoint_path=None, eval_result=None):
    with ops.Graph().as_default() as g:
      contrib_variables.create_global_step(g)

      input_ops = serving_from_csv_input(train_config, args, keep_target)
      model_fn_ops = estimator._call_model_fn(input_ops.features,
                                              None,
                                              model_fn_lib.ModeKeys.INFER)
      output_fetch_tensors = make_output_tensors(
          train_config=train_config,
          args=args,
          input_ops=input_ops,
          model_fn_ops=model_fn_ops,
          keep_target=keep_target)

      signature_def_map = {
          'serving_default': signature_def_utils.predict_signature_def(
              input_ops.default_inputs, output_fetch_tensors)
      }

      if not checkpoint_path:
        # Locate the latest checkpoint
        checkpoint_path = saver.latest_checkpoint(estimator._model_dir)
      if not checkpoint_path:
        raise NotFittedError("Couldn't find trained model at %s."
                             % estimator._model_dir)

      export_dir = saved_model_export_utils.get_timestamped_export_dir(
          export_dir_base)

      with tf_session.Session('') as session:
        # variables.initialize_local_variables()
        variables.local_variables_initializer()
        data_flow_ops.tables_initializer()
        saver_for_restore = saver.Saver(
            variables.global_variables(),
            sharded=True)
        saver_for_restore.restore(session, checkpoint_path)

        init_op = control_flow_ops.group(
            variables.local_variables_initializer(),
            data_flow_ops.tables_initializer())

        # Perform the export
        builder = saved_model_builder.SavedModelBuilder(export_dir)
        builder.add_meta_graph_and_variables(
            session, [tag_constants.SERVING],
            signature_def_map=signature_def_map,
            assets_collection=ops.get_collection(
                ops.GraphKeys.ASSET_FILEPATHS),
            legacy_init_op=init_op)
        builder.save(False)

      # Add the extra assets
      if assets_extra:
        assets_extra_path = os.path.join(compat.as_bytes(export_dir),
                                         compat.as_bytes('assets.extra'))
        for dest_relative, source in assets_extra.items():
          dest_absolute = os.path.join(compat.as_bytes(assets_extra_path),
                                       compat.as_bytes(dest_relative))
          dest_path = os.path.dirname(dest_absolute)
          gfile.MakeDirs(dest_path)
          gfile.Copy(source, dest_absolute)

    # only keep the last 3 models
    saved_model_export_utils.garbage_collect_exports(
        python_portable_string(export_dir_base),
        exports_to_keep=3)

    # save the last model to the model folder.
    # export_dir_base = A/B/intermediate_models/
    if keep_target:
      final_dir = os.path.join(args.job_dir, 'evaluation_model')
    else:
      final_dir = os.path.join(args.job_dir, 'model')
    if file_io.is_directory(final_dir):
      file_io.delete_recursively(final_dir)
    file_io.recursive_create_dir(final_dir)
    _recursive_copy(export_dir, final_dir)

    return export_dir
Author: parthea, Project: pydatalab, Lines: 81, Source: util.py



Note: The tensorflow.python.lib.io.file_io.delete_recursively examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other source-code hosting and documentation platforms. The snippets were selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and redistribution or reuse should follow each project's license. Do not repost without permission.

