
Python file_io.write_string_to_file Function Code Examples


This article collects typical usage examples of the write_string_to_file function from tensorflow.python.lib.io.file_io in Python. If you have been wondering what write_string_to_file does, how to call it, or what real-world usage looks like, the curated examples below should help.



The following presents 20 code examples of write_string_to_file, ordered by popularity by default.
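As a quick orientation before the examples, here is a minimal, self-contained sketch of the function's basic contract: it creates (or overwrites) the file at the given path and writes the string in a single call. This round trip is an illustrative assumption distilled from the test code below, not an excerpt from any one project.

import os
import tempfile

from tensorflow.python.lib.io import file_io

# Write a string to a file in one call; the parent directory must already exist.
demo_path = os.path.join(tempfile.mkdtemp(), "demo.txt")
file_io.write_string_to_file(demo_path, "hello file_io")

# Read it back (older TF versions return bytes) and clean up.
print(file_io.read_file_to_string(demo_path))
file_io.delete_file(demo_path)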

Example 1: testRename

 def testRename(self):
   file_path = os.path.join(self._base_dir, "temp_file")
   file_io.write_string_to_file(file_path, "testing")
   rename_path = os.path.join(self._base_dir, "rename_file")
   file_io.rename(file_path, rename_path)
   self.assertTrue(file_io.file_exists(rename_path))
   self.assertFalse(file_io.file_exists(file_path))
Developer: AriaAsuka | Project: tensorflow | Lines: 7 | Source: file_io_test.py


Example 2: testCopyOverwriteFalse

 def testCopyOverwriteFalse(self):
   file_path = os.path.join(self._base_dir, "temp_file")
   file_io.write_string_to_file(file_path, "testing")
   copy_path = os.path.join(self._base_dir, "copy_file")
   file_io.write_string_to_file(copy_path, "copy")
   with self.assertRaises(errors.AlreadyExistsError):
     file_io.copy(file_path, copy_path, overwrite=False)
Developer: AriaAsuka | Project: tensorflow | Lines: 7 | Source: file_io_test.py


Example 3: _write_object_graph

def _write_object_graph(saveable_view, export_dir, asset_file_def_index):
  """Save a SavedObjectGraph proto for `root`."""
  # SavedObjectGraph is similar to the CheckpointableObjectGraph proto in the
  # checkpoint. It will eventually go into the SavedModel.
  proto = saved_object_graph_pb2.SavedObjectGraph()
  saveable_view.fill_object_graph_proto(proto)

  coder = nested_structure_coder.StructureCoder()
  for concrete_function in saveable_view.concrete_functions:
    serialized = function_serialization.serialize_concrete_function(
        concrete_function, saveable_view.captured_tensor_node_ids, coder)
    if serialized is not None:
      proto.concrete_functions[concrete_function.name].CopyFrom(
          serialized)

  for obj, obj_proto in zip(saveable_view.nodes, proto.nodes):
    _write_object_proto(obj, obj_proto, asset_file_def_index)

  extra_asset_dir = os.path.join(
      compat.as_bytes(export_dir),
      compat.as_bytes(constants.EXTRA_ASSETS_DIRECTORY))
  file_io.recursive_create_dir(extra_asset_dir)
  object_graph_filename = os.path.join(
      extra_asset_dir, compat.as_bytes("object_graph.pb"))
  file_io.write_string_to_file(object_graph_filename, proto.SerializeToString())
Developer: terrytangyuan | Project: tensorflow | Lines: 25 | Source: save.py


Example 4: save

  def save(self, as_text=False):
    """Writes a `SavedModel` protocol buffer to disk.

    The function writes the SavedModel protocol buffer to the export directory
    in serialized format.

    Args:
      as_text: Writes the SavedModel protocol buffer in text format to disk.

    Returns:
      The path to which the SavedModel protocol buffer was written.
    """
    if not file_io.file_exists(self._export_dir):
      file_io.recursive_create_dir(self._export_dir)

    if as_text:
      path = os.path.join(
          compat.as_bytes(self._export_dir),
          compat.as_bytes(constants.SAVED_MODEL_FILENAME_PBTXT))
      file_io.write_string_to_file(path, str(self._saved_model))
    else:
      path = os.path.join(
          compat.as_bytes(self._export_dir),
          compat.as_bytes(constants.SAVED_MODEL_FILENAME_PB))
      file_io.write_string_to_file(path, self._saved_model.SerializeToString())
    tf_logging.info("SavedModel written to: %s", path)

    return path
Developer: 1000sprites | Project: tensorflow | Lines: 28 | Source: builder_impl.py
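For context on how this save method is typically reached, here is a minimal usage sketch based on the public TF 1.x SavedModelBuilder API (the export directory is a hypothetical example):

import tensorflow as tf

export_dir = "/tmp/my_saved_model"  # hypothetical path
builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
with tf.Session(graph=tf.Graph()) as sess:
  v = tf.Variable(42, name="v")
  sess.run(tf.global_variables_initializer())
  builder.add_meta_graph_and_variables(sess, [tf.saved_model.tag_constants.SERVING])
# save() serializes the SavedModel proto via write_string_to_file, as shown above.
path = builder.save(as_text=True)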


Example 5: _export_model_json

def _export_model_json(model, saved_model_path):
  """Saves model configuration as a json string under assets folder."""
  model_json = model.to_json()
  model_json_filepath = os.path.join(
      saved_model_utils.get_or_create_assets_dir(saved_model_path),
      compat.as_text(constants.SAVED_MODEL_FILENAME_JSON))
  file_io.write_string_to_file(model_json_filepath, model_json)
Developer: adit-chandra | Project: tensorflow | Lines: 7 | Source: saved_model.py


Example 6: test_numerics

  def test_numerics(self):
    output_folder = tempfile.mkdtemp()
    input_file_path = tempfile.mkstemp(dir=output_folder)[1]
    try:
      file_io.write_string_to_file(
        input_file_path,
        '\n'.join(['%s,%s,%s' % (i, 10 * i + 0.5, i + 0.5) for i in range(100)]))

      schema = [{'name': 'col1', 'type': 'INTEGER'},
                {'name': 'col2', 'type': 'FLOAT'},
                {'name': 'col3', 'type': 'FLOAT'}]
      features = {'col1': {'transform': 'scale', 'source_column': 'col1'},
                  'col2': {'transform': 'identity', 'source_column': 'col2'},
                  'col3': {'transform': 'target'}}
      feature_analysis.run_local_analysis(
          output_folder, [input_file_path], schema, features)

      stats = json.loads(
          file_io.read_file_to_string(
              os.path.join(output_folder, analyze.constant.STATS_FILE)).decode())

      self.assertEqual(stats['num_examples'], 100)
      col = stats['column_stats']['col1']
      self.assertAlmostEqual(col['max'], 99.0)
      self.assertAlmostEqual(col['min'], 0.0)
      self.assertAlmostEqual(col['mean'], 49.5)

      col = stats['column_stats']['col2']
      self.assertAlmostEqual(col['max'], 990.5)
      self.assertAlmostEqual(col['min'], 0.5)
      self.assertAlmostEqual(col['mean'], 495.5)
    finally:
      shutil.rmtree(output_folder)
Developer: googledatalab | Project: pydatalab | Lines: 33 | Source: test_analyze.py


Example 7: update_renames_v2

def update_renames_v2(output_file_path):
  """Writes a Python dictionary mapping deprecated to canonical API names.

  Args:
    output_file_path: File path to write output to. Any existing contents
      would be replaced.
  """
  # Set of rename lines to write to output file in the form:
  #   'tf.deprecated_name': 'tf.canonical_name'
  rename_line_set = set()
  # _tf_api_names attribute name
  tensorflow_api_attr = tf_export.API_ATTRS[tf_export.TENSORFLOW_API_NAME].names

  def visit(unused_path, unused_parent, children):
    """Visitor that collects rename strings to add to rename_line_set."""
    for child in children:
      _, attr = tf_decorator.unwrap(child[1])
      if not hasattr(attr, '__dict__'):
        continue
      api_names = attr.__dict__.get(tensorflow_api_attr, [])
      deprecated_api_names = attr.__dict__.get('_tf_deprecated_api_names', [])
      canonical_name = tf_export.get_canonical_name(
          api_names, deprecated_api_names)
      for name in deprecated_api_names:
        rename_line_set.add('    \'tf.%s\': \'tf.%s\'' % (name, canonical_name))

  visitor = public_api.PublicAPIVisitor(visit)
  visitor.do_not_descend_map['tf'].append('contrib')
  traverse.traverse(tf, visitor)

  renames_file_text = '%srenames = {\n%s\n}\n' % (
      _FILE_HEADER, ',\n'.join(sorted(rename_line_set)))
  file_io.write_string_to_file(output_file_path, renames_file_text)
Developer: AnishShah | Project: tensorflow | Lines: 33 | Source: generate_v2_renames_map.py


Example 8: visualize_embeddings

def visualize_embeddings(summary_writer, config):
  """Stores a config file used by the embedding projector.

  Args:
    summary_writer: The summary writer used for writing events.
    config: `tf.contrib.tensorboard.plugins.projector.ProjectorConfig`
      proto that holds the configuration for the projector such as paths to
      checkpoint files and metadata files for the embeddings. If
      `config.model_checkpoint_path` is none, it defaults to the
      `logdir` used by the summary_writer.

  Raises:
    ValueError: If the summary writer does not have a `logdir`.
  """
  logdir = summary_writer.get_logdir()

  # Sanity checks.
  if logdir is None:
    raise ValueError('Summary writer must have a logdir')

  # Saving the config file in the logdir.
  config_pbtxt = text_format.MessageToString(config)
  # FYI - the 'projector_config.pbtxt' string is hardcoded in the projector
  # plugin.
  # TODO(dandelion): Restore this to a reference to the projector plugin
  file_io.write_string_to_file(
      os.path.join(logdir, 'projector_config.pbtxt'), config_pbtxt)
Developer: RubinLiao | Project: tensorflow | Lines: 27 | Source: __init__.py
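A hedged sketch of the caller's side of this API, using the TF 1.x contrib projector namespace this function ships in (the log directory and tensor name are hypothetical):

import tensorflow as tf
from tensorflow.contrib.tensorboard.plugins import projector

summary_writer = tf.summary.FileWriter("/tmp/logs")  # hypothetical logdir
config = projector.ProjectorConfig()
embedding = config.embeddings.add()
embedding.tensor_name = "word_embedding"  # name of the embedding variable
embedding.metadata_path = "metadata.tsv"  # optional labels file
# Writes projector_config.pbtxt into the writer's logdir via write_string_to_file.
projector.visualize_embeddings(summary_writer, config)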


Example 9: _write_object_graph

def _write_object_graph(saveable_view, export_dir, asset_file_def_index):
  """Save a SavedObjectGraph proto for `root`."""
  # SavedObjectGraph is similar to the CheckpointableObjectGraph proto in the
  # checkpoint. It will eventually go into the SavedModel.
  proto = saved_object_graph_pb2.SavedObjectGraph()
  saveable_view.fill_object_graph_proto(proto)

  node_ids = util.ObjectIdentityDictionary()
  for i, obj in enumerate(saveable_view.nodes):
    node_ids[obj] = i
    if resource_variable_ops.is_resource_variable(obj):
      node_ids[obj.handle] = i
    elif isinstance(obj, tracking.TrackableAsset):
      node_ids[obj.asset_path.handle] = i

  for obj, obj_proto in zip(saveable_view.nodes, proto.nodes):
    _write_object_proto(obj, obj_proto, asset_file_def_index, node_ids)

  extra_asset_dir = os.path.join(
      compat.as_bytes(export_dir),
      compat.as_bytes(constants.EXTRA_ASSETS_DIRECTORY))
  file_io.recursive_create_dir(extra_asset_dir)
  object_graph_filename = os.path.join(
      extra_asset_dir, compat.as_bytes("object_graph.pb"))
  file_io.write_string_to_file(object_graph_filename, proto.SerializeToString())
Developer: rmlarsen | Project: tensorflow | Lines: 25 | Source: save.py


Example 10: testFileWrite

 def testFileWrite(self):
   file_path = os.path.join(self.get_temp_dir(), "temp_file")
   file_io.write_string_to_file(file_path, "testing")
   self.assertTrue(file_io.file_exists(file_path))
   file_contents = file_io.read_file_to_string(file_path)
   self.assertEqual(b"testing", file_contents)
   file_io.delete_file(file_path)
Developer: AI-MR-Related | Project: tensorflow | Lines: 7 | Source: file_io_test.py


Example 11: run_analysis

def run_analysis(args):
  """Builds an analysis file for training.

  Uses BigQuery tables to do the analysis.

  Args:
    args: command line args

  Raises:
    ValueError if schema contains unknown types.
  """
  import google.datalab.bigquery as bq
  if args.bigquery_table:
    table = bq.Table(args.bigquery_table)
    schema_list = table.schema._bq_schema
  else:
    schema_list = json.loads(
        file_io.read_file_to_string(args.schema_file).decode())
    table = bq.ExternalDataSource(
        source=args.input_file_pattern,
        schema=bq.Schema(schema_list))

  # Check the schema is supported.
  for col_schema in schema_list:
    col_type = col_schema['type'].lower()
    if col_type != 'string' and col_type != 'integer' and col_type != 'float':
      raise ValueError('Schema contains an unsupported type %s.' % col_type)

  run_numerical_analysis(table, schema_list, args)
  run_categorical_analysis(table, schema_list, args)

  # Save a copy of the schema to the output location.
  file_io.write_string_to_file(
      os.path.join(args.output_dir, SCHEMA_FILE),
      json.dumps(schema_list, indent=2, separators=(',', ': ')))
Developer: googledatalab | Project: pydatalab | Lines: 35 | Source: cloud_preprocess.py


Example 12: create_object_test

def create_object_test():
  """Verifies file_io's object manipulation methods ."""
  starttime = int(round(time.time() * 1000))
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime)
  print("Creating dir %s." % dir_name)
  file_io.create_dir(dir_name)

  # Create a file in this directory.
  file_name = "%s/test_file.txt" % dir_name
  print("Creating file %s." % file_name)
  file_io.write_string_to_file(file_name, "test file creation.")

  list_files_pattern = "%s/test_file*.txt" % dir_name
  print("Getting files matching pattern %s." % list_files_pattern)
  files_list = file_io.get_matching_files(list_files_pattern)
  print(files_list)

  assert len(files_list) == 1
  assert files_list[0] == file_name

  # Cleanup test files.
  print("Deleting file %s." % file_name)
  file_io.delete_file(file_name)

  # Delete directory.
  print("Deleting directory %s." % dir_name)
  file_io.delete_recursively(dir_name)
Developer: AutumnQYN | Project: tensorflow | Lines: 27 | Source: gcs_smoke.py
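The takeaway from this smoke test is that file_io treats gs:// URLs the same as local paths. A hedged one-liner against a hypothetical bucket (requires GCS credentials and TensorFlow's GCS filesystem support):

# "my-bucket" is a hypothetical bucket name.
file_io.write_string_to_file("gs://my-bucket/tf_gcs_test/test_file.txt",
                             "test file creation.")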


Example 13: testAssets

  def testAssets(self):
    export_dir = self._get_export_dir("test_assets")
    builder = saved_model_builder.SavedModelBuilder(export_dir)

    with self.test_session(graph=ops.Graph()) as sess:
      self._init_and_validate_variable(sess, "v", 42)

      # Build an asset collection.
      ignored_filepath = os.path.join(
          compat.as_bytes(test.get_temp_dir()), compat.as_bytes("ignored.txt"))
      file_io.write_string_to_file(ignored_filepath, "will be ignored")

      asset_collection = self._build_asset_collection("hello42.txt",
                                                      "foo bar baz",
                                                      "asset_file_tensor")

      builder.add_meta_graph_and_variables(
          sess, ["foo"], assets_collection=asset_collection)

    # Save the SavedModel to disk.
    builder.save()

    with self.test_session(graph=ops.Graph()) as sess:
      foo_graph = loader.load(sess, ["foo"], export_dir)
      self._validate_asset_collection(export_dir, foo_graph.collection_def,
                                      "hello42.txt", "foo bar baz",
                                      "asset_file_tensor:0")
      ignored_asset_path = os.path.join(
          compat.as_bytes(export_dir),
          compat.as_bytes(constants.ASSETS_DIRECTORY),
          compat.as_bytes("ignored.txt"))
      self.assertFalse(file_io.file_exists(ignored_asset_path))
Developer: KiaraStarlab | Project: tensorflow | Lines: 32 | Source: saved_model_test.py


Example 14: _build_asset_collection

 def _build_asset_collection(self, asset_file_name, asset_file_contents,
                             asset_file_tensor_name):
   asset_filepath = os.path.join(
       compat.as_bytes(tf.test.get_temp_dir()),
       compat.as_bytes(asset_file_name))
   file_io.write_string_to_file(asset_filepath, asset_file_contents)
   asset_file_tensor = tf.constant(asset_filepath, name=asset_file_tensor_name)
   tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, asset_file_tensor)
   asset_collection = tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS)
   return asset_collection
Developer: botonchou | Project: tensorflow | Lines: 7 | Source: saved_model_test.py


Example 15: test_make_transform_graph_images

  def test_make_transform_graph_images(self):

    print('Testing make_transform_graph with image_to_vec. ' +
          'It may take a few minutes because it needs to download a large inception checkpoint.')

    def _open_and_encode_image(img_url):
      with file_io.FileIO(img_url, 'r') as f:
        img = Image.open(f).convert('RGB')
        output = cStringIO.StringIO()
        img.save(output, 'jpeg')
      return base64.urlsafe_b64encode(output.getvalue())

    try:
      output_folder = tempfile.mkdtemp()
      stats_file_path = os.path.join(output_folder, feature_transforms.STATS_FILE)
      stats = {'column_stats': {}}
      file_io.write_string_to_file(stats_file_path, json.dumps(stats))

      schema = [{'name': 'img', 'type': 'STRING'}]
      features = {'img': {'transform': 'image_to_vec', 'source_column': 'img'}}

      img_string1 = _open_and_encode_image(
          'gs://cloud-ml-data/img/flower_photos/daisy/15207766_fc2f1d692c_n.jpg')
      img_string2 = _open_and_encode_image(
          'gs://cloud-ml-data/img/flower_photos/dandelion/8980164828_04fbf64f79_n.jpg')
      input_data = [img_string1, img_string2]
      results = self._run_graph(output_folder, features, schema, stats, input_data)
      embeddings = results['img']
      self.assertEqual(len(embeddings), 2)
      self.assertEqual(len(embeddings[0]), 2048)
      self.assertEqual(embeddings[0].dtype, np.float32)
      self.assertTrue(any(x != 0.0 for x in embeddings[1]))

    finally:
      shutil.rmtree(output_folder)
Developer: parthea | Project: pydatalab | Lines: 35 | Source: test_feature_transforms.py


Example 16: test_categorical

  def test_categorical(self):
    output_folder = tempfile.mkdtemp()
    input_file_path = tempfile.mkstemp(dir=output_folder)[1]
    try:
      csv_file = ['red,apple', 'red,pepper', 'red,apple', 'blue,grape',
                  'blue,apple', 'green,pepper']
      file_io.write_string_to_file(
        input_file_path,
        '\n'.join(csv_file))

      schema = [{'name': 'color', 'type': 'STRING'},
                {'name': 'type', 'type': 'STRING'}]
      features = {'color': {'transform': 'one_hot', 'source_column': 'color'},
                  'type': {'transform': 'target'}}
      feature_analysis.run_local_analysis(
        output_folder, [input_file_path], schema, features)

      stats = json.loads(
          file_io.read_file_to_string(
              os.path.join(output_folder, analyze.constant.STATS_FILE)).decode())
      self.assertEqual(stats['column_stats']['color']['vocab_size'], 3)

      # Color column.
      vocab_str = file_io.read_file_to_string(
        os.path.join(output_folder, analyze.constant.VOCAB_ANALYSIS_FILE % 'color'))
      vocab = pd.read_csv(six.StringIO(vocab_str),
                          header=None,
                          names=['color', 'count'])
      expected_vocab = pd.DataFrame(
          {'color': ['red', 'blue', 'green'], 'count': [3, 2, 1]},
          columns=['color', 'count'])
      pd.util.testing.assert_frame_equal(vocab, expected_vocab)

    finally:
      shutil.rmtree(output_folder)
Developer: googledatalab | Project: pydatalab | Lines: 35 | Source: test_analyze.py


Example 17: write_graph

def write_graph(graph_def, logdir, name, as_text=True):
  """Writes a graph proto to a file.

  The graph is written as a binary proto unless `as_text` is `True`.

  ```python
  v = tf.Variable(0, name='my_variable')
  sess = tf.Session()
  tf.train.write_graph(sess.graph_def, '/tmp/my-model', 'train.pbtxt')
  ```

  Args:
    graph_def: A `GraphDef` protocol buffer.
    logdir: Directory where to write the graph. This can refer to remote
      filesystems, such as Google Cloud Storage (GCS).
    name: Filename for the graph.
    as_text: If `True`, writes the graph as an ASCII proto.
  """
  # gcs does not have the concept of directory at the moment.
  if not file_io.file_exists(logdir) and not logdir.startswith("gs:"):
    file_io.recursive_create_dir(logdir)
  path = os.path.join(logdir, name)
  if as_text:
    file_io.write_string_to_file(path, str(graph_def))
  else:
    file_io.write_string_to_file(path, graph_def.SerializeToString())
Developer: 2020zyc | Project: tensorflow | Lines: 26 | Source: training_util.py


Example 18: test_numerics

  def test_numerics(self):
    test_folder = os.path.join(self._bucket_root, 'test_numerics')
    input_file_path = os.path.join(test_folder, 'input.csv')
    output_folder = os.path.join(test_folder, 'test_output')
    file_io.recursive_create_dir(output_folder)

    file_io.write_string_to_file(
      input_file_path,
      '\n'.join(['%s,%s' % (i, 10 * i + 0.5) for i in range(100)]))

    schema = [{'name': 'col1', 'type': 'INTEGER'},
              {'name': 'col2', 'type': 'FLOAT'}]
    features = {'col1': {'transform': 'scale', 'source_column': 'col1'},
                'col2': {'transform': 'identity', 'source_column': 'col2'}}
    analyze.run_cloud_analysis(
        output_dir=output_folder,
        csv_file_pattern=input_file_path,
        bigquery_table=None,
        schema=schema,
        inverted_features=analyze.invert_features(features))

    stats = json.loads(
        file_io.read_file_to_string(
            os.path.join(output_folder, analyze.constant.STATS_FILE)).decode())

    self.assertEqual(stats['num_examples'], 100)
    col = stats['column_stats']['col1']
    self.assertAlmostEqual(col['max'], 99.0)
    self.assertAlmostEqual(col['min'], 0.0)
    self.assertAlmostEqual(col['mean'], 49.5)

    col = stats['column_stats']['col2']
    self.assertAlmostEqual(col['max'], 990.5)
    self.assertAlmostEqual(col['min'], 0.5)
    self.assertAlmostEqual(col['mean'], 495.5)
Developer: javiervicho | Project: pydatalab | Lines: 35 | Source: test_analyze.py


Example 19: testCopy

 def testCopy(self):
   file_path = os.path.join(self._base_dir, "temp_file")
   file_io.write_string_to_file(file_path, "testing")
   copy_path = os.path.join(self._base_dir, "copy_file")
   file_io.copy(file_path, copy_path)
   self.assertTrue(file_io.file_exists(copy_path))
   self.assertEqual(b"testing", file_io.read_file_to_string(file_path))
Developer: AriaAsuka | Project: tensorflow | Lines: 7 | Source: file_io_test.py


Example 20: testCreateRecursiveDir

 def testCreateRecursiveDir(self):
   dir_path = os.path.join(self._base_dir, "temp_dir/temp_dir1/temp_dir2")
   file_io.recursive_create_dir(dir_path)
   file_path = os.path.join(dir_path, "temp_file")
   file_io.write_string_to_file(file_path, "testing")
   self.assertTrue(file_io.file_exists(file_path))
   file_io.delete_recursively(os.path.join(self._base_dir, "temp_dir"))
   self.assertFalse(file_io.file_exists(file_path))
Developer: AriaAsuka | Project: tensorflow | Lines: 8 | Source: file_io_test.py



Note: The tensorflow.python.lib.io.file_io.write_string_to_file examples in this article were compiled by 纯净天空 from source code and documentation on GitHub, MSDocs, and similar platforms. The snippets are selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and distribution and use are subject to each project's license. Do not republish without permission.

